AccessDenied<\/Code>Access Denied<\/Message>70Y9R36XNPEQXMGV<\/RequestId>G6F5AK4qo7JdaEdKGMtS0P6gdLPeFOdEfSEfvTOZEfk9km0\/jAfp08QLfKSTFFj1oWIKoAoBehM=<\/HostId><\/Error>\r\n```\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.14.0\r\n- Platform: macOS Catalina 10.15.7\r\n- Python version: 3.7.12\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3361\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3361\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3360","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3360\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3360\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3360\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3360","id":1068724697,"node_id":"PR_kwDODunzps4vQ_16","number":3360,"title":"Add The Pile USPTO subset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1638382085000,"updated_at":1638531929000,"closed_at":1638531928000,"author_association":"MEMBER","active_lock_reason":null,"body":"Add:\r\n- USPTO subset of The Pile: \"uspto\" config\r\n\r\nClose bigscience-workshop\/data_tooling#297.\r\n\r\nCC: @StellaAthena","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3360\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3360\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3360","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3360","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3360.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3360.patch","merged_at":1638531927000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3359","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3359\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3359\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3359\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3359","id":1068638213,"node_id":"PR_kwDODunzps4vQtI0","number":3359,"title":"Add The Pile Free Law subset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["@albertvillanova Is there a specific reason you\u2019re adding the Pile under \u201cthe\u201d instead of under \u201cpile\u201d? That does not appear to be consistent with other datasets.","Hi @StellaAthena,\r\n\r\nI asked myself the same question, but at the end I decided to be consistent with previously added Pile subsets:\r\n- #2817\r\n\r\nI guess the reason is to stress that the definite article is always used before the name of the dataset (your site says: \"The Pile. An 800GB Dataset of Diverse Text for Language Modeling\"). Other datasets are not usually preceded by the definite article, like \"the SQuAD\" or \"the GLUE\" or \"the Common Voice\"...\r\n\r\nCC: @lhoestq ","> I guess the reason is to stress that the definite article is always used before the name of the dataset (your site says: \"The Pile. 
An 800GB Dataset of Diverse Text for Language Modeling\").\r\n\r\nYes that's because of this that it starts with \"the\""],"created_at":1638377164000,"updated_at":1638785537000,"closed_at":1638379844000,"author_association":"MEMBER","active_lock_reason":null,"body":"Add:\r\n- Free Law subset of The Pile: \"free_law\" config\r\n\r\nClose bigscience-workshop\/data_tooling#75.\r\n\r\nCC: @StellaAthena","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3359\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3359\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3359","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3359","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3359.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3359.patch","merged_at":1638379843000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3358","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3358\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3358\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3358\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3358","id":1068623216,"node_id":"I_kwDODunzps4_seVw","number":3358,"title":"add new field, and get errors","user":{"login":"yanllearnn","id":38966558,"node_id":"MDQ6VXNlcjM4OTY2NTU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38966558?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yanllearnn","html_url":"https:\/\/github.com\/yanllearnn","followers_url":"https:\/\/api.github.com\/users\/yanllearnn\/followers","following_url":"https:\/\/api.github.com\/users\/yanllearnn\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yanllearnn\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yanllearnn\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yanllearnn\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yanllearnn\/orgs","repos_url":"https:\/\/api.github.com\/users\/yanllearnn\/repos","events_url":"https:\/\/api.github.com\/users\/yanllearnn\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yanllearnn\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi, \r\n\r\ncould you please post this question on our [Forum](https:\/\/discuss.huggingface.co\/) as we keep issues for bugs and feature requests? 
","> Hi,\r\n> \r\n> could you please post this question on our [Forum](https:\/\/discuss.huggingface.co\/) as we keep issues for bugs and feature requests?\r\n\r\nok."],"created_at":1638376538000,"updated_at":1638411982000,"closed_at":1638411982000,"author_association":"NONE","active_lock_reason":null,"body":"after adding new field **tokenized_examples[\"example_id\"]**, and get errors below,\r\nI think it is due to changing data to tensor, and **tokenized_examples[\"example_id\"]** is string list \r\n**all fields**\r\n```\r\n***************** train_dataset 1: Dataset({\r\n features: ['attention_mask', 'end_positions', 'example_id', 'input_ids', 'start_positions', 'token_type_ids'],\r\n num_rows: 87714\r\n})\r\n```\r\n\r\n**Errors**\r\n```\r\nTraceback (most recent call last):\r\n File \"\/usr\/local\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils_base.py\", line 705, in convert_to_tensors\r\n tensor = as_tensor(value)\r\nValueError: too many dimensions 'str'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3358\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3358\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3357","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3357\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3357\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3357\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3357","id":1068607382,"node_id":"PR_kwDODunzps4vQmcL","number":3357,"title":"Update README.md","user":{"login":"apergo-ai","id":68908804,"node_id":"MDQ6VXNlcjY4OTA4ODA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/68908804?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/apergo-ai","html_url":"https:\/\/github.com\/apergo-ai","followers_url":"https:\/\/api.github.com\/users\/apergo-ai\/followers","following_url":"https:\/\/api.github.com\/users\/apergo-ai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/apergo-ai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/apergo-ai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/apergo-ai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/apergo-ai\/orgs","repos_url":"https:\/\/api.github.com\/users\/apergo-ai\/repos","events_url":"https:\/\/api.github.com\/users\/apergo-ai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/apergo-ai\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1638375646000,"updated_at":1638375646000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"After having worked a bit with the dataset.\r\nAs far as I know, it is solely in English (en-US). There are only a few mails in Spanish, French or German (less than a dozen I would estimate).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3357\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3357\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3357","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3357","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3357.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3357.patch","merged_at":null},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3356","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3356\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3356\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3356\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3356","id":1068503932,"node_id":"PR_kwDODunzps4vQQLD","number":3356,"title":"to_tf_dataset() refactor","user":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Also, please don't merge yet - I need to make sure all the code samples and notebooks have a collate_fn specified, since we're removing the ability for this method to work without one!","Hi @lhoestq @mariosasko, the other PRs this was depending on in Transformers and huggingface\/notebooks are now merged, so this is ready to go. Do you want to take one more look at it, or are you happy at this point?","The documentation for the method is fine, it doesn't need to be changed, but the tutorial notebook definitely looks a little out of date. Let me see what I can do!","@lhoestq I rewrote the last bit of the notebook - let me know what you think!","Cool thank you ! 
It's much nicer that what we had :)\r\n\r\nI also spotted other things I'd like to update in the notebook (especially the beginning) but it can be fixed later"],"created_at":1638370470000,"updated_at":1639045613000,"closed_at":1639045613000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"This is the promised cleanup to `to_tf_dataset()` now that the course is out of the way! The main changes are:\r\n\r\n- A collator is always required (there was way too much hackiness making things like labels work without it)\r\n- Lots of cleanup and a lot of code moved to `_get_output_signature`\r\n- Should now handle it gracefully when the data collator adds unexpected columns","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3356\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":3,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3356\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3356","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3356","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3356.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3356.patch","merged_at":1639045613000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3355","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3355\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3355\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3355\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3355","id":1068468573,"node_id":"PR_kwDODunzps4vQIoy","number":3355,"title":"Extend support for streaming datasets that use pd.read_excel","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["TODO in the future: https:\/\/github.com\/huggingface\/datasets\/pull\/3355#discussion_r761138011\r\n- If we finally find a use case where the `pd.read_excel()` can work in streaming mode (using fsspec), that is, without using the `.read()`, I propose to try this first, catch the ValueError and then try with `.read`, but all implemented in `xpandas_read_excel`. 
"],"created_at":1638368563000,"updated_at":1639725859000,"closed_at":1639725858000,"author_association":"MEMBER","active_lock_reason":null,"body":"This PR fixes error:\r\n```\r\nValueError: Cannot seek streaming HTTP file\r\n```\r\n\r\nCC: @severo ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3355\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3355\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3355","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3355","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3355.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3355.patch","merged_at":1639725858000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3354","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3354\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3354\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3354\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3354","id":1068307271,"node_id":"PR_kwDODunzps4vPl9d","number":3354,"title":"Remove duplicate name from dataset cards","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1638359140000,"updated_at":1638364470000,"closed_at":1638364469000,"author_association":"MEMBER","active_lock_reason":null,"body":"Remove duplicate name from dataset card for:\r\n- ajgt_twitter_ar\r\n- emotone_ar","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3354\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3354\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3354","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3354","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3354.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3354.patch","merged_at":1638364469000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3353","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3353\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3353\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3353\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3353","id":1068173783,"node_id":"I_kwDODunzps4_qwnX","number":3353,"title":" add one field \"example_id\", but I can't see it in the \"comput_loss\" function","user":{"login":"yanllearnn","id":38966558,"node_id":"MDQ6VXNlcjM4OTY2NTU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38966558?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yanllearnn","html_url":"https:\/\/github.com\/yanllearnn","followers_url":"https:\/\/api.github.com\/users\/yanllearnn\/followers","following_url":"https:\/\/api.github.com\/users\/yanllearnn\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yanllearnn\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yanllearnn\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yanllearnn\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yanllearnn\/orgs","repos_url":"https:\/\/api.github.com\/users\/yanllearnn\/repos","events_url":"https:\/\/api.github.com\/users\/yanllearnn\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yanllearnn\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! Your function looks fine, I used to map `squad` locally and it indeed added the `example_id` field correctly.\r\n\r\nHowever I think that in the `compute_loss` method only a subset of the fields are available: the model inputs. Since `example_id` is not a model input (it's not passed as a parameter to the model), the data loader doesn't need to return it by default.\r\n\r\nHowever you can disable this behavior by setting `remove_unused_columns` to `False` to your training arguments. 
In this case in `compute_loss` you will get the full item with all the fields.\r\n\r\nNote that since the model doesn't take `example_id` as input, you will have to remove it from the inputs when `model(**inputs)` is called","Hi, I have set **args.remove_unused_columns=False** and **training_args.remove_unused_columns=False**, but the field doesn't been contained yet.\r\n```\r\ndef main():\r\n argp = HfArgumentParser(TrainingArguments)\r\n # The HfArgumentParser object collects command-line arguments into an object (and provides default values for unspecified arguments).\r\n # In particular, TrainingArguments has several keys that you'll need\/want to specify (when you call run.py from the command line):\r\n # --do_train\r\n # When included, this argument tells the script to train a model.\r\n # See docstrings for \"--task\" and \"--dataset\" for how the training dataset is selected.\r\n # --do_eval\r\n # When included, this argument tells the script to evaluate the trained\/loaded model on the validation split of the selected dataset.\r\n # --per_device_train_batch_size \r\n # This is the training batch size.\r\n # If you're running on GPU, you should try to make this as large as you can without getting CUDA out-of-memory errors.\r\n # For reference, with --max_length=128 and the default ELECTRA-small model, a batch size of 32 should fit in 4gb of GPU memory.\r\n # --num_train_epochs \r\n # How many passes to do through the training data.\r\n # --output_dir \r\n # Where to put the trained model checkpoint(s) and any eval predictions.\r\n # *This argument is required*.\r\n\r\n argp.add_argument('--model', type=str,\r\n default='google\/electra-small-discriminator',\r\n help=\"\"\"This argument specifies the base model to fine-tune.\r\n This should either be a HuggingFace model ID (see https:\/\/huggingface.co\/models)\r\n or a path to a saved model checkpoint (a folder containing config.json and pytorch_model.bin).\"\"\")\r\n argp.add_argument('--task', type=str, choices=['nli', 'qa'], required=True,\r\n help=\"\"\"This argument specifies which task to train\/evaluate on.\r\n Pass \"nli\" for natural language inference or \"qa\" for question answering.\r\n By default, \"nli\" will use the SNLI dataset, and \"qa\" will use the SQuAD dataset.\"\"\")\r\n argp.add_argument('--dataset', type=str, default=None,\r\n help=\"\"\"This argument overrides the default dataset used for the specified task.\"\"\")\r\n argp.add_argument('--max_length', type=int, default=128,\r\n help=\"\"\"This argument limits the maximum sequence length used during training\/evaluation.\r\n Shorter sequence lengths need less memory and computation time, but some examples may end up getting truncated.\"\"\")\r\n argp.add_argument('--max_train_samples', type=int, default=None,\r\n help='Limit the number of examples to train on.')\r\n argp.add_argument('--max_eval_samples', type=int, default=None,\r\n help='Limit the number of examples to evaluate on.')\r\n\r\n argp.remove_unused_columns = False\r\n training_args, args = argp.parse_args_into_dataclasses()\r\n args.remove_unused_columns=False\r\n training_args.remove_unused_columns=False\r\n```\r\n\r\n\r\n```\r\n**************** train_dataset: Dataset({\r\n features: ['id', 'title', 'context', 'question', 'answers'],\r\n num_rows: 87599\r\n})\r\n\r\n\r\n**************** train_dataset_featurized: Dataset({\r\n features: ['attention_mask', 'end_positions', 'input_ids', 'start_positions', 'token_type_ids'],\r\n num_rows: 87714\r\n})\r\n```","Hi, I print the value, all are set to 
False, but don't work.\r\n```\r\n********************* training_args: TrainingArguments(\r\n_n_gpu=1,\r\nadafactor=False,\r\nadam_beta1=0.9,\r\nadam_beta2=0.999,\r\nadam_epsilon=1e-08,\r\ndataloader_drop_last=False,\r\ndataloader_num_workers=0,\r\ndataloader_pin_memory=True,\r\nddp_find_unused_parameters=None,\r\ndebug=[],\r\ndeepspeed=None,\r\ndisable_tqdm=False,\r\ndo_eval=False,\r\ndo_predict=False,\r\ndo_train=True,\r\neval_accumulation_steps=None,\r\neval_steps=None,\r\nevaluation_strategy=IntervalStrategy.NO,\r\nfp16=False,\r\nfp16_backend=auto,\r\nfp16_full_eval=False,\r\nfp16_opt_level=O1,\r\ngradient_accumulation_steps=1,\r\ngreater_is_better=None,\r\ngroup_by_length=False,\r\nignore_data_skip=False,\r\nlabel_names=None,\r\nlabel_smoothing_factor=0.0,\r\nlearning_rate=5e-05,\r\nlength_column_name=length,\r\nload_best_model_at_end=False,\r\nlocal_rank=-1,\r\nlog_level=-1,\r\nlog_level_replica=-1,\r\nlog_on_each_node=True,\r\nlogging_dir=.\/re_trained_model\/runs\/Dec01_14-15-08_399b9290604c,\r\nlogging_first_step=False,\r\nlogging_steps=500,\r\nlogging_strategy=IntervalStrategy.STEPS,\r\nlr_scheduler_type=SchedulerType.LINEAR,\r\nmax_grad_norm=1.0,\r\nmax_steps=-1,\r\nmetric_for_best_model=None,\r\nmp_parameters=,\r\nno_cuda=False,\r\nnum_train_epochs=3.0,\r\noutput_dir=.\/re_trained_model,\r\noverwrite_output_dir=False,\r\npast_index=-1,\r\nper_device_eval_batch_size=8,\r\nper_device_train_batch_size=8,\r\nprediction_loss_only=False,\r\npush_to_hub=False,\r\npush_to_hub_model_id=re_trained_model,\r\npush_to_hub_organization=None,\r\npush_to_hub_token=None,\r\nremove_unused_columns=False,\r\nreport_to=['tensorboard'],\r\nresume_from_checkpoint=None,\r\nrun_name=.\/re_trained_model,\r\nsave_on_each_node=False,\r\nsave_steps=500,\r\nsave_strategy=IntervalStrategy.STEPS,\r\nsave_total_limit=None,\r\nseed=42,\r\nsharded_ddp=[],\r\nskip_memory_metrics=True,\r\ntpu_metrics_debug=False,\r\ntpu_num_cores=None,\r\nuse_legacy_prediction_loop=False,\r\nwarmup_ratio=0.0,\r\nwarmup_steps=0,\r\nweight_decay=0.0,\r\n)\r\n```\r\n```\r\n********************* args: Namespace(dataset='squad', max_eval_samples=None, max_length=128, max_train_samples=None, model='google\/electra-small-discriminator', remove_unused_columns=False, task='qa')\r\n2021-12-01 14:15:10,048 - WARNING - datasets.builder - Reusing dataset squad (\/root\/.cache\/huggingface\/datasets\/squad\/plain_text\/1.0.0\/d6ec3ceb99ca480ce37cdd35555d6cb2511d223b9150cce08a837ef62ffea453)\r\nSome weights of the model checkpoint at google\/electra-small-discriminator were not used when initializing ElectraForQuestionAnswering: ['discriminator_predictions.dense_prediction.weight', 'discriminator_predictions.dense_prediction.bias', 'discriminator_predictions.dense.weight', 'discriminator_predictions.dense.bias']\r\n- This IS expected if you are initializing ElectraForQuestionAnswering from the checkpoint of a model trained on another task or with another architecture (e.g. 
initializing a BertForSequenceClassification model from a BertForPreTraining model).\r\n- This IS NOT expected if you are initializing ElectraForQuestionAnswering from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\r\nSome weights of ElectraForQuestionAnswering were not initialized from the model checkpoint at google\/electra-small-discriminator and are newly initialized: ['qa_outputs.bias', 'qa_outputs.weight']\r\nYou should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\r\nPreprocessing data... (this takes a little bit, should only happen once per dataset)\r\n```","Hmmm, it might be because the default data collator removes all the fields with `string` type:\r\n\r\nhttps:\/\/github.com\/huggingface\/transformers\/blob\/4c0dd199c8305903564c2edeae23d294edd4b321\/src\/transformers\/data\/data_collator.py#L107-L112\r\n\r\nI guess you also need a custom data collator that doesn't remove them.","can you give a tutorial about how to do this?","I overwrite **get_train_dataloader**, and remove **_remove_unused_columns**, but it doesn't work.\r\n\r\n```\r\n def get_train_dataloader(self) -> DataLoader:\r\n \"\"\"\r\n Returns the training :class:`~torch.utils.data.DataLoader`.\r\n\r\n Will use no sampler if :obj:`self.train_dataset` does not implement :obj:`__len__`, a random sampler (adapted\r\n to distributed training if necessary) otherwise.\r\n\r\n Subclass and override this method if you want to inject some custom behavior.\r\n \"\"\"\r\n if self.train_dataset is None:\r\n raise ValueError(\"Trainer: training requires a train_dataset.\")\r\n\r\n train_dataset = self.train_dataset\r\n # if is_datasets_available() and isinstance(train_dataset, datasets.Dataset):\r\n # train_dataset = self._remove_unused_columns(train_dataset, description=\"training\")\r\n\r\n if isinstance(train_dataset, torch.utils.data.IterableDataset):\r\n if self.args.world_size > 1:\r\n train_dataset = IterableDatasetShard(\r\n train_dataset,\r\n batch_size=self.args.train_batch_size,\r\n drop_last=self.args.dataloader_drop_last,\r\n num_processes=self.args.world_size,\r\n process_index=self.args.process_index,\r\n )\r\n\r\n return DataLoader(\r\n train_dataset,\r\n batch_size=self.args.train_batch_size,\r\n collate_fn=self.data_collator,\r\n num_workers=self.args.dataloader_num_workers,\r\n pin_memory=self.args.dataloader_pin_memory,\r\n )\r\n\r\n train_sampler = self._get_train_sampler()\r\n\r\n return DataLoader(\r\n train_dataset,\r\n batch_size=self.args.train_batch_size,\r\n sampler=train_sampler,\r\n collate_fn=self.data_collator,\r\n drop_last=self.args.dataloader_drop_last,\r\n num_workers=self.args.dataloader_num_workers,\r\n pin_memory=self.args.dataloader_pin_memory,\r\n )\r\n```","Hi, it works now, thank you.\r\n1. **args.remove_unused_columns=False** and **training_args.remove_unused_columns=False**\r\n2. overwrite **get_train_dataloader**, and remove **_remove_unused_columns**\r\n3. add new fields, and can be got in **inputs**. "],"created_at":1638351309000,"updated_at":1638374559000,"closed_at":1638374559000,"author_association":"NONE","active_lock_reason":null,"body":"Hi, I add one field **example_id**, but I can't see it in the **comput_loss** function, how can I do this? 
below is the information of inputs\r\n\r\n```\r\n*********************** inputs: {'attention_mask': tensor([[1, 1, 1, ..., 0, 0, 0],\r\n [1, 1, 1, ..., 0, 0, 0],\r\n [1, 1, 1, ..., 0, 0, 0],\r\n ...,\r\n [1, 1, 1, ..., 0, 0, 0],\r\n [1, 1, 1, ..., 0, 0, 0],\r\n [1, 1, 1, ..., 0, 0, 0]], device='cuda:0'), 'end_positions': tensor([ 25, 97, 93, 44, 25, 112, 109, 134], device='cuda:0'), 'input_ids': tensor([[ 101, 2054, 2390, ..., 0, 0, 0],\r\n [ 101, 2054, 2515, ..., 0, 0, 0],\r\n [ 101, 2054, 2106, ..., 0, 0, 0],\r\n ...,\r\n [ 101, 2339, 2001, ..., 0, 0, 0],\r\n [ 101, 2054, 2515, ..., 0, 0, 0],\r\n [ 101, 2054, 2003, ..., 0, 0, 0]], device='cuda:0'), 'start_positions': tensor([ 20, 90, 89, 41, 25, 96, 106, 132], device='cuda:0'), 'token_type_ids': tensor([[0, 0, 0, ..., 0, 0, 0],\r\n [0, 0, 0, ..., 0, 0, 0],\r\n [0, 0, 0, ..., 0, 0, 0],\r\n ...,\r\n [0, 0, 0, ..., 0, 0, 0],\r\n [0, 0, 0, ..., 0, 0, 0],\r\n [0, 0, 0, ..., 0, 0, 0]], device='cuda:0')} \r\n```\r\n\r\n```\r\n# This function preprocesses a question answering dataset, tokenizing the question and context text\r\n# and finding the right offsets for the answer spans in the tokenized context (to use as labels).\r\n# Adapted from https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/pytorch\/question-answering\/run_qa.py\r\ndef prepare_train_dataset_qa(examples, tokenizer, max_seq_length=None):\r\n questions = [q.lstrip() for q in examples[\"question\"]]\r\n max_seq_length = tokenizer.model_max_length\r\n # tokenize both questions and the corresponding context\r\n # if the context length is longer than max_length, we split it to several\r\n # chunks of max_length\r\n tokenized_examples = tokenizer(\r\n questions,\r\n examples[\"context\"],\r\n truncation=\"only_second\",\r\n max_length=max_seq_length,\r\n stride=min(max_seq_length \/\/ 2, 128),\r\n return_overflowing_tokens=True,\r\n return_offsets_mapping=True,\r\n padding=\"max_length\"\r\n )\r\n\r\n # Since one example might give us several features if it has a long context,\r\n # we need a map from a feature to its corresponding example.\r\n sample_mapping = tokenized_examples.pop(\"overflow_to_sample_mapping\")\r\n # The offset mappings will give us a map from token to character position\r\n # in the original context. 
This will help us compute the start_positions\r\n # and end_positions to get the final answer string.\r\n offset_mapping = tokenized_examples.pop(\"offset_mapping\")\r\n\r\n tokenized_examples[\"start_positions\"] = []\r\n tokenized_examples[\"end_positions\"] = []\r\n\r\n tokenized_examples[\"example_id\"] = []\r\n\r\n for i, offsets in enumerate(offset_mapping):\r\n input_ids = tokenized_examples[\"input_ids\"][i]\r\n # We will label features not containing the answer the index of the CLS token.\r\n cls_index = input_ids.index(tokenizer.cls_token_id)\r\n sequence_ids = tokenized_examples.sequence_ids(i)\r\n # from the feature idx to sample idx\r\n sample_index = sample_mapping[i]\r\n # get the answer for a feature\r\n answers = examples[\"answers\"][sample_index]\r\n\r\n tokenized_examples[\"example_id\"].append(examples[\"id\"][sample_index])\r\n\r\n if len(answers[\"answer_start\"]) == 0:\r\n tokenized_examples[\"start_positions\"].append(cls_index)\r\n tokenized_examples[\"end_positions\"].append(cls_index)\r\n else:\r\n # Start\/end character index of the answer in the text.\r\n start_char = answers[\"answer_start\"][0]\r\n end_char = start_char + len(answers[\"text\"][0])\r\n\r\n # Start token index of the current span in the text.\r\n token_start_index = 0\r\n while sequence_ids[token_start_index] != 1:\r\n token_start_index += 1\r\n\r\n # End token index of the current span in the text.\r\n token_end_index = len(input_ids) - 1\r\n while sequence_ids[token_end_index] != 1:\r\n token_end_index -= 1\r\n\r\n # Detect if the answer is out of the span (in which case this feature is labeled with the CLS index).\r\n if not (offsets[token_start_index][0] <= start_char and\r\n offsets[token_end_index][1] >= end_char):\r\n tokenized_examples[\"start_positions\"].append(cls_index)\r\n tokenized_examples[\"end_positions\"].append(cls_index)\r\n else:\r\n # Otherwise move the token_start_index and token_end_index to the two ends of the answer.\r\n # Note: we could go after the last offset if the answer is the last word (edge case).\r\n while token_start_index < len(offsets) and \\\r\n offsets[token_start_index][0] <= start_char:\r\n token_start_index += 1\r\n tokenized_examples[\"start_positions\"].append(\r\n token_start_index - 1)\r\n while offsets[token_end_index][1] >= end_char:\r\n token_end_index -= 1\r\n tokenized_examples[\"end_positions\"].append(token_end_index + 1)\r\n\r\n return tokenized_examples\r\n```\r\n\r\n_Originally posted by @yanllearnn in https:\/\/github.com\/huggingface\/datasets\/issues\/3333#issuecomment-983457161_","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3353\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3353\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3352","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3352\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3352\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3352\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3352","id":1068102994,"node_id":"PR_kwDODunzps4vO6uZ","number":3352,"title":"Make LABR dataset streamable","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1638346947000,"updated_at":1638355742000,"closed_at":1638355741000,"author_association":"MEMBER","active_lock_reason":null,"body":"Fix LABR dataset to make it streamable.\r\n\r\nRelated to: #3350.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3352\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3352\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3352","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3352","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3352.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3352.patch","merged_at":1638355741000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3351","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3351\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3351\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3351\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3351","id":1068094873,"node_id":"PR_kwDODunzps4vO5AS","number":3351,"title":"Add VCTK dataset","user":{"login":"jaketae","id":25360440,"node_id":"MDQ6VXNlcjI1MzYwNDQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25360440?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaketae","html_url":"https:\/\/github.com\/jaketae","followers_url":"https:\/\/api.github.com\/users\/jaketae\/followers","following_url":"https:\/\/api.github.com\/users\/jaketae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaketae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaketae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaketae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaketae\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaketae\/repos","events_url":"https:\/\/api.github.com\/users\/jaketae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaketae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hello @patrickvonplaten, I hope it's okay to ping you with a (dumb) question!\r\n\r\nI've been trying to get `dl_manager.download_and_extract(_DL_URL)` to work with no avail. I verified that this is a problem on two different machines (lab server, GCP), so I doubt it's an issue with network connectivity. 
Here is the full trace.\r\n\r\n```\r\n(venv) (base) jaketae@jake-gpu1:~\/documents\/datasets$ datasets-cli test datasets\/vctk --save_infos --all_configs\r\nTesting builder 'main' (1\/1)\r\nDownloading and preparing dataset vctk\/main to \/home\/jaketae\/.cache\/huggingface\/datasets\/vctk\/main\/0.9.2\/2bfa52a93469fa9d6d4b1831c6511db5442b9f4e48620aef2bc3890d7a5268a8...\r\nTraceback (most recent call last):\r\n File \"\/home\/jaketae\/documents\/datasets\/venv\/bin\/datasets-cli\", line 33, in \r\n sys.exit(load_entry_point('datasets', 'console_scripts', 'datasets-cli')())\r\n File \"\/home\/jaketae\/documents\/datasets\/src\/datasets\/commands\/datasets_cli.py\", line 33, in main\r\n service.run()\r\n File \"\/home\/jaketae\/documents\/datasets\/src\/datasets\/commands\/test.py\", line 146, in run\r\n builder.download_and_prepare(\r\n File \"\/home\/jaketae\/documents\/datasets\/src\/datasets\/builder.py\", line 593, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/jaketae\/documents\/datasets\/src\/datasets\/builder.py\", line 659, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/home\/jaketae\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/vctk\/2bfa52a93469fa9d6d4b1831c6511db5442b9f4e48620aef2bc3890d7a5268a8\/vctk.py\", line 76, in _split_generators\r\n root_path = dl_manager.download_and_extract(_DL_URL)\r\n File \"\/home\/jaketae\/documents\/datasets\/src\/datasets\/utils\/download_manager.py\", line 283, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/home\/jaketae\/documents\/datasets\/src\/datasets\/utils\/download_manager.py\", line 195, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/home\/jaketae\/documents\/datasets\/src\/datasets\/utils\/py_utils.py\", line 234, in map_nested\r\n return function(data_struct)\r\n File \"\/home\/jaketae\/documents\/datasets\/src\/datasets\/utils\/download_manager.py\", line 216, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/home\/jaketae\/documents\/datasets\/src\/datasets\/utils\/file_utils.py\", line 298, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/home\/jaketae\/documents\/datasets\/src\/datasets\/utils\/file_utils.py\", line 608, in get_from_cache\r\n raise ConnectionError(f\"Couldn't reach {url}\")\r\nConnectionError: Couldn't reach https:\/\/datashare.is.ed.ac.uk\/bitstream\/handle\/10283\/3443\/VCTK-Corpus-0.92.zip\r\n```\r\n\r\nOn my local, however, the URL correctly points to the download zip file. My admittedly naive guess is that the website is web-crawler or scraper proof (requiring specific headers, etc.), but I also think I might have just missed a very basic step in the process.\r\n\r\nApologies for the delayed PR, and TIA for the help!","Hey @jaketae, \r\n\r\nHmm, yeah I don't know really either - the link also works correctly for me when doing:\r\n\r\n```\r\nwget https:\/\/datashare.is.ed.ac.uk\/bitstream\/handle\/10283\/3443\/VCTK-Corpus-0.92.zip\r\n```\r\n\r\nI think however that I had a similar problem previously with Edinburgh's (`.ed.ac.uk`) websites which I solved with the following hack. 
Not sure if this could be the same problem here...\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/e1104ad5d3e83f8b1571e0d6fef4fdabf0a1fde5\/datasets\/ami\/ami.py#L364\r\n\r\n","The AMI dataset is stored under a different website though it seems: `\"https:\/\/groups.inf.ed.ac.uk\/ami\/AMICorpusMirror\/\/amicorpus\/{}\/audio\/{}\"`\r\n\r\nso not 100p sure if this solves the problem","Hi @patrickvonplaten,\r\n\r\nThanks for the feedback! Sadly, disabling multi-processing didn't cut it for me. \r\n\r\nI've been looking at VCTK code in [`torchaudio`](https:\/\/pytorch.org\/audio\/stable\/_modules\/torchaudio\/datasets\/vctk.html) and [`tfds`](https:\/\/github.com\/tensorflow\/datasets\/blob\/master\/tensorflow_datasets\/audio\/vctk.py). I don't think they're using a hack to accomplish this, so I'll try to look into it to see if I can pinpoint the cause. I'll keep you in the loop here. Thank you!","Hi @patrickvonplaten, \r\n\r\nAfter more investigation, I found that simply increasing `etag_timeout` in `get_from_cache` from 10 to 100 solved it. However, unless I'm missing something, an issue is that `etag_timeout` is basically hard-coded as a default parameter because `cached_path`, which calls `get_from_cache` has no way of modifying the default. \r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/b25ac1d62670e7b339ed552ecc37846d2abd30c7\/src\/datasets\/utils\/file_utils.py#L298-L310\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/b25ac1d62670e7b339ed552ecc37846d2abd30c7\/src\/datasets\/utils\/file_utils.py#L497-L510\r\n\r\n\r\nI can think of two solutions.\r\n\r\n* Simply increase the default to 100\r\n* Allow `etag_timeout` to be modifiable on a per-dataset basis by integrating it to `download_config` (maybe this is already supported?)\r\n\r\nThank you!","I think in this case we can increase the `etag_timeout` - what do you think @lhoestq @albertvillanova ?","Yes let's increase it to 100 for the moment. Later we can see if it really needed to move it into `download_config` or not","Thanks for the feedback @patrickvonplaten @lhoestq, I'll continue working on this in that direction!","Hello @patrickvonplaten, VCTK is ready for review! 
\r\n\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> ds = load_dataset(\"vctk\")\r\nUsing the latest cached version of the module from \/home\/lily\/jt856\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/vctk\/b7aa278182de3a7aa2897cbd12c1e19f1af9840a2ead69a6d710fdbc1d2df02a (last modified on Sat Dec 25 00:47:31 2021) since it couldn't be found locally at vctk., or remotely on the Hugging Face Hub.\r\nReusing dataset vctk (\/home\/lily\/jt856\/.cache\/huggingface\/datasets\/vctk\/main\/0.9.2\/b7aa278182de3a7aa2897cbd12c1e19f1af9840a2ead69a6d710fdbc1d2df02a)\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 198.35it\/s]\r\n>>> len(ds[\"train\"])\r\n88156\r\n>>> ds[\"train\"][0]\r\n{'speaker_id': 'p225', 'audio': {'path': '\/home\/lily\/jt856\/.cache\/huggingface\/datasets\/downloads\/extracted\/8ed7dad05dfffdb552a3699777442af8e8ed11e656feb277f35bf9aea448f49e\/wav48_silence_trimmed\/p225\/p225_001_mic1.flac', 'array': array([0.00485229, 0.00689697, 0.00619507, ..., 0.00811768, 0.00836182,\r\n 0.00854492], dtype=float32), 'sampling_rate': 48000}, 'file': '\/home\/lily\/jt856\/.cache\/huggingface\/datasets\/downloads\/extracted\/8ed7dad05dfffdb552a3699777442af8e8ed11e656feb277f35bf9aea448f49e\/wav48_silence_trimmed\/p225\/p225_001_mic1.flac', 'text': 'Please call Stella.', 'text_id': '001', 'age': '23', 'gender': 'F', 'accent': 'English', 'region': 'Southern England', 'comment': ''}\r\n```\r\nA number of tests are failing on CircleCI, but from my limited knowledge they appear to be complaining about `conda` and `pip`\/`wheel`-related incompatibilities. But if I'm reading them wrong and it's an issue with this PR, please let me know and I'll try to fix them.\r\n\r\nBelated merry Christmas and a happy new year!"],"created_at":1638346397000,"updated_at":1640704320000,"closed_at":1640703908000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Fixes #1837. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3351\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3351\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3351","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3351","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3351.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3351.patch","merged_at":1640703907000},"is_pull_request":true}
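The `etag_timeout` thread above proposes two fixes: raising the hard-coded default, and threading the value through `download_config`. Below is a minimal sketch of the second option; the `DownloadConfig` dataclass and `get_etag` helper are illustrative stand-ins for the `datasets` internals, not the actual merged code (which simply raised the default).

```python
from dataclasses import dataclass
from typing import Optional

import requests


@dataclass
class DownloadConfig:
    # Hypothetical per-dataset override; the merged fix raised the
    # hard-coded default from 10 to 100 seconds instead.
    etag_timeout: float = 100.0


def get_etag(url: str, download_config: Optional[DownloadConfig] = None) -> Optional[str]:
    """HEAD the URL and return its ETag, honoring the configurable timeout."""
    config = download_config or DownloadConfig()
    response = requests.head(url, timeout=config.etag_timeout, allow_redirects=True)
    return response.headers.get("ETag")


# A slow host such as datashare.is.ed.ac.uk could then be given a longer budget:
etag = get_etag(
    "https://datashare.is.ed.ac.uk/bitstream/handle/10283/3443/VCTK-Corpus-0.92.zip",
    DownloadConfig(etag_timeout=100.0),
)
```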
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3350","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3350\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3350\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3350\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3350","id":1068078160,"node_id":"PR_kwDODunzps4vO1aj","number":3350,"title":"Avoid content-encoding issue while streaming datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1638345408000,"updated_at":1638346501000,"closed_at":1638346500000,"author_association":"MEMBER","active_lock_reason":null,"body":"This PR will fix streaming of datasets served with gzip content-encoding:\r\n```\r\nClientPayloadError: 400, message='Can not decode content-encoding: gzip'\r\n```\r\n\r\nFix #2918.\r\n\r\nCC: @severo 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3350\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3350\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3350","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3350","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3350.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3350.patch","merged_at":1638346500000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3349","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3349\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3349\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3349\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3349","id":1067853601,"node_id":"PR_kwDODunzps4vOF-s","number":3349,"title":"raise exception instead of using assertions.","user":{"login":"manisnesan","id":153142,"node_id":"MDQ6VXNlcjE1MzE0Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/153142?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manisnesan","html_url":"https:\/\/github.com\/manisnesan","followers_url":"https:\/\/api.github.com\/users\/manisnesan\/followers","following_url":"https:\/\/api.github.com\/users\/manisnesan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manisnesan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manisnesan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manisnesan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manisnesan\/orgs","repos_url":"https:\/\/api.github.com\/users\/manisnesan\/repos","events_url":"https:\/\/api.github.com\/users\/manisnesan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manisnesan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["@mariosasko - Thanks for the review & suggestions. Updated as per the suggestions. ","@mariosasko - Hello, Are there any additional changes required from my end??. Wondering if this PR can be merged or still pending on additional steps.","@mariosasko - The approved changes in the PR now has conflicts with the master branch. Would you like me to resolve the conflicts??. Let me know. ","@mariosasko @lhoestq - Gentle reminder about my previous question. ","Hi ! 
Thanks for the heads up :)\r\nI just resolved the conflicts; it should be alright now","Merging, thanks for the help @manisnesan!"],"created_at":1638322671000,"updated_at":1640016447000,"closed_at":1640016447000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Fix for the remaining files: https:\/\/github.com\/huggingface\/datasets\/issues\/3171","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3349\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3349\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3349","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3349","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3349.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3349.patch","merged_at":1640016447000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3348","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3348\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3348\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3348\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3348","id":1067831113,"node_id":"PR_kwDODunzps4vOBOQ","number":3348,"title":"BLEURT: Match key names to correspond with filename","user":{"login":"jaehlee","id":11873078,"node_id":"MDQ6VXNlcjExODczMDc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11873078?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaehlee","html_url":"https:\/\/github.com\/jaehlee","followers_url":"https:\/\/api.github.com\/users\/jaehlee\/followers","following_url":"https:\/\/api.github.com\/users\/jaehlee\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaehlee\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaehlee\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaehlee\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaehlee\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaehlee\/repos","events_url":"https:\/\/api.github.com\/users\/jaehlee\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaehlee\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Thanks for the suggestion! I think the current checked-in `CHECKPOINT_URLS` is already not working. I believe anyone who tried using the new ckpts (`BLEURT-20-X`) can't unless this fix is in. The zip file from bleurt side unzips to directory name matching the filename (capitalized for new ones). 
For example, without the current changes I'd get the following error:\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nAssertionError Traceback (most recent call last)\r\n in ()\r\n 1 predictions = [\"hello there\", \"general kenobi\"]\r\n 2 references = [\"hello there\", \"general kenobi\"]\r\n----> 3 bleurt = datasets.load_metric(\"bleurt\", \"bleurt-20\")\r\n 4 results = bleurt.compute(predictions=predictions, references=references)\r\n\r\n4 frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/bleurt\/checkpoint.py in read_bleurt_config(path)\r\n 84 \"\"\"Reads and checks config file from a BLEURT checkpoint.\"\"\"\r\n 85 assert tf.io.gfile.exists(path), \\\r\n---> 86 \"Could not find BLEURT checkpoint {}\".format(path)\r\n 87 config_path = os.path.join(path, CONFIG_FILE)\r\n 88 assert tf.io.gfile.exists(config_path), \\\r\n\r\nAssertionError: Could not find BLEURT checkpoint \/root\/.cache\/huggingface\/metrics\/bleurt\/bleurt-20\/downloads\/extracted\/e34c60f1a05394ecda54e253a10413ca7b5d59f9a23f3cc73258c6b78ffa2f50\/bleurt-20\r\n```\r\nInspecting the specified path, I see that the directory name is `BLEURT-20` instead of `bleurt-20`. \r\nAnother solution, similar to your suggestion, would be to meddle with `dl_manager.download_and_extract` so that it lowercases all extracted paths, but I imagine this would affect other parts of the library. ","Indeed, good catch! Your solution that fixes `CHECKPOINT_URLS` is simple and works well, thanks :)\r\n\r\nFurthermore, to avoid breaking changes, we could also keep support for the lowercase one:\r\n```python\r\n if self.config_name.lower() in CHECKPOINT_URLS:\r\n checkpoint_name = self.config_name.lower()\r\n elif self.config_name.upper() in CHECKPOINT_URLS:\r\n checkpoint_name = self.config_name.upper()\r\n else:\r\n raise KeyError(\r\n f\"{self.config_name} model not found. You should supply the name of a model checkpoint for bleurt in {CHECKPOINT_URLS.keys()}\"\r\n )\r\n```\r\nand then we can use `checkpoint_name` instead of `self.config_name` to download and instantiate the model:\r\n```python\r\n model_path = dl_manager.download_and_extract(CHECKPOINT_URLS[checkpoint_name])\r\n self.scorer = score.BleurtScorer(os.path.join(model_path, checkpoint_name))\r\n```\r\n\r\nPlease let me know if that sounds reasonable to you!","Thanks for the suggestion! I believe your suggestion should work to make the keys case-insensitive. The changes are committed to the PR now. "],"created_at":1638320478000,"updated_at":1638893217000,"closed_at":1638893217000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"In order to properly locate downloaded ckpt files, the key name needs to match the filename. Corrects a change introduced in #3235 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3348\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3348\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3348","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3348","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3348.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3348.patch","merged_at":1638893217000},"is_pull_request":true}
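The case-insensitive lookup proposed in the thread above can be restated as a standalone helper for clarity. The `CHECKPOINT_URLS` entries below are abbreviated placeholders, not the full set shipped with the metric script.

```python
# Abbreviated placeholder mapping; the real metric script lists more checkpoints.
CHECKPOINT_URLS = {
    "bleurt-base-128": "https://storage.googleapis.com/bleurt-oss/bleurt-base-128.zip",
    "BLEURT-20": "https://storage.googleapis.com/bleurt-oss-21/BLEURT-20.zip",
}


def resolve_checkpoint_name(config_name: str) -> str:
    """Match a user-supplied config name against the known keys, ignoring case."""
    if config_name.lower() in CHECKPOINT_URLS:
        return config_name.lower()
    if config_name.upper() in CHECKPOINT_URLS:
        return config_name.upper()
    raise KeyError(
        f"{config_name} model not found. You should supply the name of a model "
        f"checkpoint for bleurt in {CHECKPOINT_URLS.keys()}"
    )


assert resolve_checkpoint_name("bleurt-20") == "BLEURT-20"           # new, capitalized ckpts
assert resolve_checkpoint_name("BLEURT-BASE-128") == "bleurt-base-128"  # legacy lowercase ckpts
```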
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3347","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3347\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3347\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3347\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3347","id":1067738902,"node_id":"PR_kwDODunzps4vNthw","number":3347,"title":"iter_archive for zip files ","user":{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["And also don't always try streaming with Google Drive - it can have issues because of how Google Drive works (with quotas, restrictions, etc.) and it can indeed cause `BlockSizeError`.\r\n\r\nFeel free to host your test data elsewhere, such as in a dataset repository on https:\/\/huggingface.co (see [here](https:\/\/huggingface.co\/docs\/datasets\/upload_dataset.html#upload-your-files) for a tutorial on how to upload files)"],"created_at":1638311657000,"updated_at":1638577342000,"closed_at":1638577331000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"* In this PR, I added the option to iterate through zipfiles for `download_manager.py` only. \r\n* Next PR will be the same applied to `streaming_download_manager.py`.\r\n* Related issue #3272.\r\n## Comments : \r\n* There is no `.isreg()` equivalent in zipfile library to check if file is Regular so I used `.is_dir()` instead to skip directories.\r\n* For now I got `streaming_download_manager.py` working for local zip files, but not for urls. 
I get the following error when I test it on an archive on Google Drive, so I'm still working on it: `BlockSizeError: Got more bytes so far (>2112) than requested (22)`\r\n\r\n\r\n## Tasks:\r\n- [x] download_manager.py\r\n- [ ] streaming_download_manager.py","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3347\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3347\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3347","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3347","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3347.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3347.patch","merged_at":null},"is_pull_request":true}
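The directory-skipping point in the PR description above can be sketched with the standard library: `zipfile.ZipInfo` has no `.isreg()` like `tarfile.TarInfo`, so `.is_dir()` is used to filter instead. A minimal sketch of the idea, not the merged implementation:

```python
import zipfile


def iter_zip_archive(path):
    """Yield (filename, file_object) pairs for the regular files in a zip archive."""
    with zipfile.ZipFile(path) as zf:
        for member in zf.infolist():
            # No `.isreg()` equivalent in zipfile, so skip directory entries instead.
            if member.is_dir():
                continue
            with zf.open(member) as file_obj:
                yield member.filename, file_obj
```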
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3346","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3346\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3346\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3346\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3346","id":1067632365,"node_id":"I_kwDODunzps4_osbt","number":3346,"title":"Failed to convert `string` with pyarrow for QED since 1.15.0","user":{"login":"tianjianjiang","id":4812544,"node_id":"MDQ6VXNlcjQ4MTI1NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4812544?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tianjianjiang","html_url":"https:\/\/github.com\/tianjianjiang","followers_url":"https:\/\/api.github.com\/users\/tianjianjiang\/followers","following_url":"https:\/\/api.github.com\/users\/tianjianjiang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tianjianjiang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tianjianjiang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tianjianjiang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tianjianjiang\/orgs","repos_url":"https:\/\/api.github.com\/users\/tianjianjiang\/repos","events_url":"https:\/\/api.github.com\/users\/tianjianjiang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tianjianjiang\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Scratch that, probably the old and incompatible usage of dataset builder from promptsource.","Actually, re-opening this issue cause the error persists\r\n\r\n```python\r\n>>> load_dataset(\"qed\")\r\nDownloading and preparing dataset qed\/qed (download: 13.43 MiB, generated: 9.70 MiB, post-processed: Unknown size, total: 23.14 MiB) to \/home\/victor_huggingface_co\/.cache\/huggingface\/datasets\/qed\/qed\/1.0.0\/47d8b6f033393aa520a8402d4baf2d6bdc1b2fbde3dc156e595d2ef34caf7d75...\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2\/2 [00:00<00:00, 2228.64it\/s]\r\nTraceback (most 
recent call last): \r\n File \"\", line 1, in \r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 1669, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 594, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 681, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 1083, in _prepare_split\r\n num_examples, num_bytes = writer.finalize()\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 468, in finalize\r\n self.write_examples_on_file()\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 339, in write_examples_on_file\r\n pa_array = pa.array(typed_sequence)\r\n File \"pyarrow\/array.pxi\", line 229, in pyarrow.lib.array\r\n File \"pyarrow\/array.pxi\", line 110, in pyarrow.lib._handle_arrow_array_protocol\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 125, in __arrow_array__\r\n out = pa.array(cast_to_python_objects(self.data, only_1d_for_numpy=True), type=type)\r\n File \"pyarrow\/array.pxi\", line 315, in pyarrow.lib.array\r\n File \"pyarrow\/array.pxi\", line 39, in pyarrow.lib._sequence_to_array\r\n File \"pyarrow\/error.pxi\", line 143, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 99, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: Could not convert 'in' with type str: tried to convert to boolean\r\n```\r\n\r\nEnvironment (datasets and pyarrow):\r\n\r\n```bash\r\n(promptsource) victor_huggingface_co@victor-dev:~\/promptsource$ datasets-cli env\r\n\r\nCopy-and-paste the text below in your GitHub issue.\r\n\r\n- `datasets` version: 1.16.1\r\n- Platform: Linux-5.0.0-1020-gcp-x86_64-with-debian-buster-sid\r\n- Python version: 3.7.11\r\n- PyArrow version: 6.0.1\r\n```\r\n```bash\r\n(promptsource) victor_huggingface_co@victor-dev:~\/promptsource$ pip show pyarrow\r\nName: pyarrow\r\nVersion: 6.0.1\r\nSummary: Python library for Apache Arrow\r\nHome-page: https:\/\/arrow.apache.org\/\r\nAuthor: \r\nAuthor-email: \r\nLicense: Apache License, Version 2.0\r\nLocation: \/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\r\nRequires: numpy\r\nRequired-by: streamlit, datasets\r\n```"],"created_at":1638303102000,"updated_at":1639492745000,"closed_at":1639492745000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\nLoading QED was fine until 1.15.0.\r\nrelated: bigscience-workshop\/promptsource#659, bigscience-workshop\/promptsource#670\r\n\r\nNot sure where the root cause is, but here are some candidates:\r\n- #3158\r\n- #3120\r\n- #3196\r\n- #2891\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nload_dataset(\"qed\")\r\n```\r\n\r\n## Expected results\r\nLoading completed.\r\n\r\n## Actual results\r\n```shell\r\nArrowInvalid: Could not 
convert in with type str: tried to convert to boolean\r\nTraceback:\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/streamlit\/script_runner.py\", line 354, in _run_script\r\n exec(code, module.__dict__)\r\nFile \"\/Users\/s0s0cr3\/Documents\/GitHub\/promptsource\/promptsource\/app.py\", line 260, in \r\n dataset = get_dataset(dataset_key, str(conf_option.name) if conf_option else None)\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/streamlit\/caching.py\", line 543, in wrapped_func\r\n return get_or_create_cached_value()\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/streamlit\/caching.py\", line 527, in get_or_create_cached_value\r\n return_value = func(*args, **kwargs)\r\nFile \"\/Users\/s0s0cr3\/Documents\/GitHub\/promptsource\/promptsource\/utils.py\", line 49, in get_dataset\r\n builder_instance.download_and_prepare()\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/datasets\/builder.py\", line 697, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/datasets\/builder.py\", line 1106, in _prepare_split\r\n num_examples, num_bytes = writer.finalize()\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/datasets\/arrow_writer.py\", line 456, in finalize\r\n self.write_examples_on_file()\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/datasets\/arrow_writer.py\", line 325, in write_examples_on_file\r\n pa_array = pa.array(typed_sequence)\r\nFile \"pyarrow\/array.pxi\", line 222, in pyarrow.lib.array\r\nFile \"pyarrow\/array.pxi\", line 110, in pyarrow.lib._handle_arrow_array_protocol\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/datasets\/arrow_writer.py\", line 121, in __arrow_array__\r\n out = pa.array(cast_to_python_objects(self.data, only_1d_for_numpy=True), type=type)\r\nFile \"pyarrow\/array.pxi\", line 305, in pyarrow.lib.array\r\nFile \"pyarrow\/array.pxi\", line 39, in pyarrow.lib._sequence_to_array\r\nFile \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\nFile \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.0, 1.16.1\r\n- Platform: macOS 1.15.7 or above\r\n- Python version: 3.7.12 and 3.9\r\n- PyArrow version: 3.0.0, 5.0.0, 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3346\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3346\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
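The `ArrowInvalid` above is ordinary pyarrow type inference at work: the array type is inferred from the leading values, so a column that starts with booleans and later contains the string `'in'` can no longer be converted. A minimal reproduction, assuming that is indeed the shape of the offending QED column:

```python
import pyarrow as pa

# The type is inferred as boolean from the leading values, so the later
# string value fails to convert:
pa.array([True, False, "in"])
# pyarrow.lib.ArrowInvalid: Could not convert 'in' with type str:
# tried to convert to boolean
```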
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3345","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3345\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3345\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3345\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3345","id":1067622951,"node_id":"I_kwDODunzps4_oqIn","number":3345,"title":"Failed to download species_800 from Google Drive zip file","user":{"login":"tianjianjiang","id":4812544,"node_id":"MDQ6VXNlcjQ4MTI1NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4812544?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tianjianjiang","html_url":"https:\/\/github.com\/tianjianjiang","followers_url":"https:\/\/api.github.com\/users\/tianjianjiang\/followers","following_url":"https:\/\/api.github.com\/users\/tianjianjiang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tianjianjiang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tianjianjiang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tianjianjiang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tianjianjiang\/orgs","repos_url":"https:\/\/api.github.com\/users\/tianjianjiang\/repos","events_url":"https:\/\/api.github.com\/users\/tianjianjiang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tianjianjiang\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi,\r\n\r\nthe dataset is downloaded normally on my machine. Maybe the URL was down at the time of your download. Could you try again?","> Hi,\r\n> \r\n> the dataset is downloaded normally on my machine. Maybe the URL was down at the time of your download. Could you try again?\r\n\r\nI have tried that many times with both load_dataset() and a browser almost simultaneously. The browser always works for me while load_dataset() fails.","@mariosasko \r\n> the dataset is downloaded normally on my machine. Maybe the URL was down at the time of your download. Could you try again?\r\n\r\nI've tried yet again just a moment ago. This time I realize that, the step `(... 
post-processed: Unknown size, total: 20.89 MiB) to \/Users\/mike\/.cache\/huggingface\/datasets\/species800\/species_800\/1.0.0\/532167f0bb8fbc0d77d6d03c4fd642c8c55527b9c5f2b1da77f3d00b0e559976...` and the one after seem unstable. If I want to retry, I will have to delete it (and probably other cache lock files). It **_sometimes_** works.\r\n\r\nBut I didn't try `download_mode=\"force_redownload\"` yet.\r\n\r\nAnyway, I suppose this isn't really a pressing issue for the time being, so I'm going to close this. Thank you.\r\n\r\n"],"created_at":1638302428000,"updated_at":1638381195000,"closed_at":1638381195000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\nOne can manually download the zip file on Google Drive, but `load_dataset()` cannot.\r\nrelated: #3248\r\n\r\n## Steps to reproduce the bug\r\n```shell\r\n> python\r\nPython 3.7.12 (default, Sep 5 2021, 08:34:29)\r\n[Clang 11.0.3 (clang-1103.0.32.62)] on darwin\r\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\r\n```\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> s800 = load_dataset(\"species_800\")\r\n```\r\n\r\n## Expected results\r\nspecies_800 downloaded.\r\n\r\n## Actual results\r\n```shell\r\nDownloading: 5.68kB [00:00, 1.22MB\/s]\r\nDownloading: 2.70kB [00:00, 691kB\/s]\r\nDownloading and preparing dataset species800\/species_800 (download: 17.36 MiB, generated: 3.53 MiB, post-processed: Unknown size, total: 20.89 MiB) to \/Users\/mike\/.cache\/huggingface\/datasets\/species800\/species_800\/1.0.0\/532167f0bb8fbc0d77d6d03c4fd642c8c55527b9c5f2b1da77f3d00b0e559976...\r\n 0%| | 0\/1 [00:00, ?it\/s]Traceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 608, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 675, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/Users\/mike\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/species_800\/532167f0bb8fbc0d77d6d03c4fd642c8c55527b9c5f2b1da77f3d00b0e559976\/species_800.py\", line 104, in _split_generators\r\n downloaded_files = dl_manager.download_and_extract(urls_to_download)\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 284, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 197, in download\r\n download_func, url_or_urls, map_tuple=True, num_proc=download_config.num_proc, disable_tqdm=False\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 209, in map_nested\r\n for obj in utils.tqdm(iterable, disable=disable_tqdm)\r\n File 
\"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 209, in \r\n for obj in utils.tqdm(iterable, disable=disable_tqdm)\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 143, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 217, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 305, in cached_path\r\n use_auth_token=download_config.use_auth_token,\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 594, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/drive.google.com\/u\/0\/uc?id=1OletxmPYNkz2ltOr9pyT0b0iBtUWxslh&export=download\/\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.14,0 1.15.0, 1.16.1\r\n- Platform: macOS Catalina 10.15.7\r\n- Python version: 3.7.12\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3345\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3345\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3344","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3344\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3344\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3344\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3344","id":1067567603,"node_id":"PR_kwDODunzps4vNJwd","number":3344,"title":"Add ArrayXD docs","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1638298411000,"updated_at":1638389763000,"closed_at":1638387332000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Documents support for dynamic first dimension in `ArrayXD` from #2891, and explain the `ArrayXD` feature in general. \r\n\r\nLet me know if I'm missing anything @lhoestq :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3344\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3344\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3344","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3344","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3344.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3344.patch","merged_at":1638387332000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3343","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3343\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3343\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3343\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3343","id":1067505507,"node_id":"PR_kwDODunzps4vM8yB","number":3343,"title":"Better error message when download fails","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1638293930000,"updated_at":1638358079000,"closed_at":1638358078000,"author_association":"MEMBER","active_lock_reason":null,"body":"From our discussions in https:\/\/github.com\/huggingface\/datasets\/issues\/3269 and https:\/\/github.com\/huggingface\/datasets\/issues\/3282 it would be nice to have better messages if a download fails.\r\n\r\nIn particular the error now shows:\r\n- the error from the HEAD request if there's one\r\n- otherwise the response code of the HEAD request\r\n\r\nI also added an error to tell users to pass `use_auth_token` when the Hugging Face Hub returns 401 (Unauthorized).\r\n\r\nWhile paying around with this I also fixed a minor issue with the `force_download` parameter that was not always taken into 
account","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3343\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3343\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3343","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3343","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3343.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3343.patch","merged_at":1638358078000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3342","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3342\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3342\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3342\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3342","id":1067481390,"node_id":"PR_kwDODunzps4vM3wh","number":3342,"title":"Fix ASSET dataset data URLs","user":{"login":"tianjianjiang","id":4812544,"node_id":"MDQ6VXNlcjQ4MTI1NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4812544?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tianjianjiang","html_url":"https:\/\/github.com\/tianjianjiang","followers_url":"https:\/\/api.github.com\/users\/tianjianjiang\/followers","following_url":"https:\/\/api.github.com\/users\/tianjianjiang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tianjianjiang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tianjianjiang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tianjianjiang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tianjianjiang\/orgs","repos_url":"https:\/\/api.github.com\/users\/tianjianjiang\/repos","events_url":"https:\/\/api.github.com\/users\/tianjianjiang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tianjianjiang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["> Hi @tianjianjiang, thanks for the fix.\r\n> The links should also be updated in the `dataset_infos.json` file.\r\n> The failing tests are due to the missing tag in the header of the `README.md` file:\r\n\r\nHi @albertvillanova, thank you for the info! 
My apologies for the messy PR.\r\n"],"created_at":1638292410000,"updated_at":1639493400000,"closed_at":1639493400000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Change the branch name \"master\" to \"main\" in the data URLs, since facebookresearch has changed that.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3342\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3342\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3342","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3342","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3342.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3342.patch","merged_at":1639493400000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3341","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3341\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3341\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3341\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3341","id":1067449569,"node_id":"I_kwDODunzps4_n_zh","number":3341,"title":"Mirror the canonical datasets to the Hugging Face Hub","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["I created a GitHub project to keep track of what needs to be done:\r\nhttps:\/\/github.com\/huggingface\/datasets\/projects\/3\r\n\r\nI also store my code in a (private for now) repository at https:\/\/github.com\/huggingface\/mirror_canonical_datasets_on_hub","I understand that the datasets are mirrored on the Hub now, right? 
Might I close @lhoestq @SBrandeis?"],"created_at":1638290525000,"updated_at":1643208457000,"closed_at":1643208457000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"- [ ] create a repo on https:\/\/hf.co\/datasets for every canonical dataset\r\n- [ ] on every commit related to a dataset, update the hf.co repo\r\n\r\nSee https:\/\/github.com\/huggingface\/moon-landing\/pull\/1562\r\n\r\n@SBrandeis: I let you edit this description if needed to precise the intent.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3341\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3341\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3340","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3340\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3340\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3340\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3340","id":1067292636,"node_id":"PR_kwDODunzps4vMP6Z","number":3340,"title":"Fix JSON ClassLabel casting for integers","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1638281994000,"updated_at":1638358050000,"closed_at":1638358050000,"author_association":"MEMBER","active_lock_reason":null,"body":"Loading a JSON dataset with ClassLabel feature types currently fails if the JSON data already has integers. 
Indeed currently it tries to convert the strings to integers without even checking if the data are not integers already.\r\n\r\nFor example this currently fails:\r\n```python\r\nfrom datasets import load_dataset, Features, ClassLabel\r\n\r\npath = \"data.json\"\r\nf = Features({\"a\": ClassLabel(names=[\"neg\", \"pos\"])})\r\nd = load_dataset(\"json\", data_files=path, features=f)\r\n```\r\ndata.json\r\n```json\r\n{\"a\": 0}\r\n{\"a\": 1}\r\n```\r\n\r\nI fixed that by adding a line that checks the type of the JSON data before trying to convert them\r\n\r\ncc @albertvillanova let me know if it sounds good to you","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3340\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3340\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3340","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3340","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3340.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3340.patch","merged_at":1638358050000},"is_pull_request":true}
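Editor's note: the kind of type check this PR describes can be sketched as below. This is a minimal illustration, not the actual patch; `cast_label_column` is a hypothetical helper, and only `pyarrow.types.is_integer` and `ClassLabel.str2int` are real APIs.

```python
# Sketch of the fix described in #3340: check the Arrow type before casting,
# so integer labels pass through unchanged and only strings are mapped to ids.
import pyarrow as pa
from datasets import ClassLabel

def cast_label_column(column: pa.Array, label: ClassLabel) -> pa.Array:
    if pa.types.is_integer(column.type):
        return column  # already encoded as class indices, nothing to do
    # otherwise assume string labels and map them to their integer ids
    return pa.array([label.str2int(value) for value in column.to_pylist()])

labels = ClassLabel(names=["neg", "pos"])
print(cast_label_column(pa.array([0, 1]), labels))           # left as-is
print(cast_label_column(pa.array(["neg", "pos"]), labels))   # -> [0, 1]
```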
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3339","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3339\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3339\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3339\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3339","id":1066662477,"node_id":"I_kwDODunzps4_k_pN","number":3339,"title":"to_tf_dataset fails on TPU","user":{"login":"nbroad1881","id":24982805,"node_id":"MDQ6VXNlcjI0OTgyODA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24982805?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nbroad1881","html_url":"https:\/\/github.com\/nbroad1881","followers_url":"https:\/\/api.github.com\/users\/nbroad1881\/followers","following_url":"https:\/\/api.github.com\/users\/nbroad1881\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nbroad1881\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nbroad1881\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nbroad1881\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nbroad1881\/orgs","repos_url":"https:\/\/api.github.com\/users\/nbroad1881\/repos","events_url":"https:\/\/api.github.com\/users\/nbroad1881\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nbroad1881\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["This might be related to https:\/\/github.com\/tensorflow\/tensorflow\/issues\/38762 , what do you think @Rocketknight1 ?\r\n> Dataset.from_generator is expected to not work with TPUs as it uses py_function underneath which is incompatible with Cloud TPU 2VM setup. If you would like to read from large datasets, maybe try to materialize it on disk and use TFRecordDataest instead.","Hi @lhoestq @nbroad1881, I think it's very similar, yes. Unfortunately `to_tf_dataset` uses `tf.numpy_function` which can't be compiled - this is a necessary evil to load from the underlying Arrow dataset. We need to update the notebooks\/examples to clarify that this won't work, or to identify a workaround. You may be able to get it to work on an actual cloud TPU VM, but those are quite new and we haven't tested it yet. ","Thank you for the explanation. 
I didn't realize the nuances of `tf.numpy_function`. In this scenario, would it be better to use `export(format='tfrecord')` ? It's not quite the same, but for very large datasets that don't fit in memory it looks like it is the only option. I haven't used `export` before, but I do recall reading that there are suggestions for how big and how many tfrecords there should be to not bottleneck the TPU. It might be nice if there were a way for the `export` method to split the files up into appropriate chunk sizes depending on the size of the dataset and the number of devices. And if that is too much, it would be nice to be able to specify the number of files that would be created when using `export`. Well... maybe the user should just do the chunking themselves and call `export` a bunch of times. Whatever the case, you have been helpful. Thanks Tensorflow boy ;-) ","Yeah, this is something we really should have a proper guide on. I'll make a note to test some things and make a 'TF TPU best practices' notebook at some point, but in the meantime I think your solution of exporting TFRecords will probably work. ","Also: I knew that tweet would haunt me"],"created_at":1638233452000,"updated_at":1638454887000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"Using `to_tf_dataset` to create a dataset and then putting it in `model.fit` results in an internal error on TPUs. I've only tried on Colab and Kaggle TPUs, not GCP TPUs.\r\n\r\n\r\n## Steps to reproduce the bug\r\nI made a colab to show the error. https:\/\/colab.research.google.com\/drive\/12x_PFKzGouFxqD4OuWfnycW_1TaT276z?usp=sharing\r\n\r\n## Expected results\r\ndataset from `to_tf_dataset` works in `model.fit` \r\nRight below the first error in the colab I use `tf.data.Dataset.from_tensor_slices` and `model.fit` works just fine. 
This is the desired outcome.\r\n\r\n## Actual results\r\n```\r\nInternalError: 5 root error(s) found.\r\n (0) INTERNAL: {{function_node __inference_train_function_30558}} failed to connect to all addresses\r\nAdditional GRPC error information from remote target \/job:localhost\/replica:0\/task:0\/device:CPU:0:\r\n:{\"created\":\"@1638231897.932218653\",\"description\":\"Failed to pick subchannel\",\"file\":\"third_party\/grpc\/src\/core\/ext\/filters\/client_channel\/client_channel.cc\",\"file_line\":3151,\"referenced_errors\":[{\"created\":\"@1638231897.932216754\",\"description\":\"failed to connect to all addresses\",\"file\":\"third_party\/grpc\/src\/core\/lib\/transport\/error_utils.cc\",\"file_line\":161,\"grpc_status\":14}]}\r\n\t [[{{node StatefulPartitionedCall}}]]\r\n\t [[MultiDeviceIteratorGetNextFromShard]]\r\nExecuting non-communication op originally returned UnavailableError, and was replaced by InternalError to avoid invoking TF network error handling logic.\r\n\t [[RemoteCall]]\r\n\t [[IteratorGetNextAsOptional]]\r\n\t [[tpu_compile_succeeded_assert\/_14023832043698465348\/_7\/_439]]\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.16.1\r\n- Platform: Linux-5.4.104+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.12\r\n- PyArrow version: 3.0.0\r\n- Tensorflow 2.7.0\r\n- `transformers` 4.12.5\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3339\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3339\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
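Editor's note: the workaround discussed in this thread (materialize TFRecords, chunked manually) might look like the sketch below. It assumes the `Dataset.export(path, format="tfrecord")` API mentioned by the reporter; the exact signature and formatting requirements may vary between `datasets` versions, and TensorFlow must be installed.

```python
# Sketch: write the dataset as several TFRecord files by sharding first,
# since `to_tf_dataset` relies on tf.numpy_function and cannot be compiled
# for Cloud TPU. One file per shard; tune num_shards for your dataset size.
from datasets import load_dataset

dataset = load_dataset("imdb", split="train")  # illustrative dataset
num_shards = 8  # e.g., one file per TPU core
for index in range(num_shards):
    shard = dataset.shard(num_shards=num_shards, index=index)
    # `export` is the method discussed above; it expects numeric/tokenized columns
    shard.export(f"imdb-train-{index:05d}-of-{num_shards:05d}.tfrecord", format="tfrecord")
```

The sharded files can then be read back with `tf.data.TFRecordDataset`, which is TPU-compatible.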
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3338","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3338\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3338\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3338\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3338","id":1066371235,"node_id":"PR_kwDODunzps4vJRFM","number":3338,"title":"[WIP] Add doctests for tutorials","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["I manage to remove the mentions of ellipsis in the code by launching the command as follows:\r\n\r\n```\r\npython -m doctest -v docs\/source\/load_hub.rst -o=ELLIPSIS\r\n```\r\n\r\nThe way you put your ellipsis will only work on mac, I've adapted it for linux as well with the following:\r\n\r\n```diff\r\n >>> from datasets import load_dataset_builder\r\n >>> dataset_builder = load_dataset_builder('imdb')\r\n- >>> print(dataset_builder.cache_dir) #doctest: +ELLIPSIS\r\n- \/Users\/...\/.cache\/huggingface\/datasets\/imdb\/plain_text\/1.0.0\/...\r\n+ >>> print(dataset_builder.cache_dir)\r\n+ \/...\/.cache\/huggingface\/datasets\/imdb\/plain_text\/1.0.0\/...\r\n```\r\n\r\nThis passes on my machine:\r\n\r\n```\r\nTrying:\r\n print(dataset_builder.cache_dir)\r\nExpecting:\r\n \/...\/.cache\/huggingface\/datasets\/imdb\/plain_text\/1.0.0\/...\r\nok\r\n```\r\n\r\nI'm getting a last error:\r\n\r\n```py\r\nExpected:\r\n DatasetDict({\r\n train: Dataset({\r\n features: ['sentence1', 'sentence2', 'label', 'idx'],\r\n num_rows: 3668\r\n })\r\n validation: Dataset({\r\n 
features: ['sentence1', 'sentence2', 'label', 'idx'],\r\n num_rows: 408\r\n })\r\n test: Dataset({\r\n features: ['sentence1', 'sentence2', 'label', 'idx'],\r\n num_rows: 1725\r\n })\r\n })\r\nGot:\r\n DatasetDict({\r\n train: Dataset({\r\n features: ['idx', 'label', 'sentence1', 'sentence2'],\r\n num_rows: 3668\r\n })\r\n validation: Dataset({\r\n features: ['idx', 'label', 'sentence1', 'sentence2'],\r\n num_rows: 408\r\n })\r\n test: Dataset({\r\n features: ['idx', 'label', 'sentence1', 'sentence2'],\r\n num_rows: 1725\r\n })\r\n })\r\n```\r\n\r\nBut this is due to `doctest` looking for an exact match and the list having an unordered print order. I wish `doctest` would be a bit more flexible with that."],"created_at":1638211246000,"updated_at":1641497763000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Opening a PR as discussed with @LysandreJik for some help with doctest issues. The goal is to add doctests for each of the tutorials in the documentation to make sure the code samples work as shown.\r\n\r\n### Issues\r\n\r\nA doctest has been added in the docstring of the `load_dataset_builder` function in `load.py` to handle variable outputs with the `ELLIPSIS` directive. When I run doctest on the `load_hub.rst` file, doctest should recognize the expected output from the docstring, and the corresponding code sample in `load_hub.rst` should pass. I am having the same issue with handling tracebacks in the `load_dataset` function.\r\n\r\nFrom the docstring:\r\n```\r\n>>> dataset_builder.cache_dir #doctest: +ELLIPSIS\r\n\/Users\/...\/.cache\/huggingface\/datasets\/imdb\/plain_text\/1.0.0\/...\r\n```\r\nTest result:\r\n```\r\nFailed example:\r\n dataset_builder.cache_dir\r\nExpected:\r\n \/Users\/...\/.cache\/huggingface\/datasets\/imdb\/plain_text\/1.0.0\/...\r\nGot:\r\n \/Users\/steven\/.cache\/huggingface\/datasets\/imdb\/plain_text\/1.0.0\/2fdd8b9bcadd6e7055e742a706876ba43f19faee861df134affd7a3f60fc38a1\r\n```\r\n\r\nI am able to get the doctest to pass by adding the doctest directives (`ELLIPSIS` and `NORMALIZE_WHITESPACE`) to the code samples in the `rst` file directly. But my understanding is that these directives should also work in the docstrings of the functions. I am running the test from the root of the directory:\r\n\r\n```\r\npython -m doctest -v docs\/source\/load_hub.rst\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3338\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3338\/timeline","performed_via_github_app":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3338","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3338","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3338.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3338.patch","merged_at":null},"is_pull_request":true}
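Editor's note: the same directives passed on the command line above can also be enabled globally when running doctest programmatically, which avoids sprinkling `#doctest: +ELLIPSIS` markers through the docs. A minimal sketch using only the standard-library `doctest` API:

```python
# Run the tutorial file with ELLIPSIS and NORMALIZE_WHITESPACE enabled for
# every example, mirroring `python -m doctest -o ELLIPSIS -o NORMALIZE_WHITESPACE`.
import doctest

results = doctest.testfile(
    "docs/source/load_hub.rst",
    module_relative=False,  # path is relative to the working directory
    optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE,
    verbose=True,
)
print(f"{results.failed} failed out of {results.attempted} attempted")
```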
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3337","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3337\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3337\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3337\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3337","id":1066232936,"node_id":"I_kwDODunzps4_jWxo","number":3337,"title":"Typing of Dataset.__getitem__ could be improved.","user":{"login":"Dref360","id":8976546,"node_id":"MDQ6VXNlcjg5NzY1NDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8976546?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dref360","html_url":"https:\/\/github.com\/Dref360","followers_url":"https:\/\/api.github.com\/users\/Dref360\/followers","following_url":"https:\/\/api.github.com\/users\/Dref360\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dref360\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dref360\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dref360\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dref360\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dref360\/repos","events_url":"https:\/\/api.github.com\/users\/Dref360\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dref360\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"Dref360","id":8976546,"node_id":"MDQ6VXNlcjg5NzY1NDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8976546?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dref360","html_url":"https:\/\/github.com\/Dref360","followers_url":"https:\/\/api.github.com\/users\/Dref360\/followers","following_url":"https:\/\/api.github.com\/users\/Dref360\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dref360\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dref360\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dref360\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dref360\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dref360\/repos","events_url":"https:\/\/api.github.com\/users\/Dref360\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dref360\/received_events","type":"User","site_admin":false},"assignees":[{"login":"Dref360","id":8976546,"node_id":"MDQ6VXNlcjg5NzY1NDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8976546?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dref360","html_url":"https:\/\/github.com\/Dref360","followers_url":"https:\/\/api.github.com\/users\/Dref360\/followers","following_url":"https:\/\/api.github.com\/users\/Dref360\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dref360\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dref360\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dref360\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dref360\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dref360\/repos","events_url":"https:\/\/api.github.com\/users\/Dref360\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dref360\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! Thanks for the suggestion, I didn't know about this decorator.\r\n\r\nIf you are interesting in contributing, feel free to open a pull request to add the overload methods for each typing combination :) To assign you to this issue, you can comment `#self-assign` in this thread.\r\n\r\n`Dataset.__getitem__` is defined right here: https:\/\/github.com\/huggingface\/datasets\/blob\/e6f1352fe19679de897f3d962e616936a17094f5\/src\/datasets\/arrow_dataset.py#L1840","#self-assign"],"created_at":1638202811000,"updated_at":1639477734000,"closed_at":1639477734000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\n\r\nThe newly added typing for Dataset.__getitem__ is Union[Dict, List]. This makes tools like mypy a bit awkward to use as we need to check the type manually. We could use type overloading to make this easier. 
[Documentation](https:\/\/docs.python.org\/3\/library\/typing.html#typing.overload)\r\n\r\n## Steps to reproduce the bug\r\nLet's have a file `test.py`\r\n\r\n```python\r\nfrom typing import List, Dict, Any\r\n\r\nfrom datasets import Dataset\r\n\r\nds = Dataset.from_dict({\r\n 'a': [1,2,3],\r\n 'b': [\"1\", \"2\", \"3\"]\r\n})\r\n\r\none_colum: List[str] = ds['a']\r\nsome_index: Dict[Any, Any] = ds[1]\r\n```\r\n\r\n## Expected results\r\n\r\nRunning `mypy test.py` should not give any error.\r\n\r\n\r\n## Actual results\r\n\r\n```\r\ntest.py:10: error: Incompatible types in assignment (expression has type \"Union[Dict[Any, Any], List[Any]]\", variable has type \"List[str]\")\r\ntest.py:11: error: Incompatible types in assignment (expression has type \"Union[Dict[Any, Any], List[Any]]\", variable has type \"Dict[Any, Any]\")\r\nFound 2 errors in 1 file (checked 1 source file)\r\n```\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.13.3\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.8\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3337\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3337\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
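Editor's note: the `typing.overload` approach proposed in this issue can be sketched as below. This is a standalone illustration (the stub class is not the real `datasets.Dataset`); the concrete method keeps the Union signature, while the overloads tell type checkers that an `int` key yields a row dict and a `str` key yields a column list.

```python
# Sketch of overloading __getitem__ so mypy can narrow the return type by key.
from typing import Any, Dict, List, Union, overload

class Dataset:  # illustrative stub, not the real datasets.Dataset
    @overload
    def __getitem__(self, key: int) -> Dict[str, Any]: ...      # a single row
    @overload
    def __getitem__(self, key: str) -> List[Any]: ...           # a whole column
    @overload
    def __getitem__(self, key: slice) -> Dict[str, List[Any]]: ...  # a batch
    def __getitem__(self, key: Union[int, str, slice]) -> Union[Dict, List]:
        ...  # the actual implementation dispatches on the key type
```

With these overloads in place, `ds["a"]` type-checks as a `List` and `ds[1]` as a `Dict`, so the mypy errors from the issue disappear without any manual casts.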
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3336","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3336\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3336\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3336\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3336","id":1066208436,"node_id":"PR_kwDODunzps4vIwUE","number":3336,"title":"Add support for multiple dynamic dimensions and to_pandas conversion for dynamic arrays","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1638201539000,"updated_at":1638201539000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Add support for multiple dynamic dimensions (e.g. 
`(None, None, 3)` for arbitrary sized images) and `to_pandas()` conversion for dynamic arrays.\r\n\r\nTODOs:\r\n* [ ] Cleaner code\r\n* [ ] Formatting issues (if NumPy doesn't allow broadcasting even though dtype is np.object)\r\n* [ ] Fix some issues with zero-dim tensors \r\n* [ ] Tests\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3336\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3336\/timeline","performed_via_github_app":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3336","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3336","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3336.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3336.patch","merged_at":null},"is_pull_request":true}
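Editor's note: the following sketch illustrates what the proposed `(None, None, 3)` shape would express. Since this PR is still a draft, treat it as an illustration of the intended behavior, not a released API; at the time of writing only the first dimension of `ArrayXD` features may be dynamic.

```python
# Illustrative: a 3D array feature whose first two dimensions are dynamic,
# e.g. arbitrarily sized RGB images stored in one column.
import numpy as np
from datasets import Array3D, Dataset, Features

features = Features({"image": Array3D(shape=(None, None, 3), dtype="uint8")})
ds = Dataset.from_dict(
    {"image": [np.zeros((480, 640, 3), dtype="uint8"),
               np.zeros((768, 1024, 3), dtype="uint8")]},
    features=features,
)
df = ds.to_pandas()  # to_pandas() support for dynamic arrays is the other half of this PR
```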
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3335","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3335\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3335\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3335\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3335","id":1066064126,"node_id":"PR_kwDODunzps4vISGy","number":3335,"title":"add Speech commands dataset","user":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["@anton-l ping","@lhoestq \r\nHi Quentin! Thank you for your feedback and suggestions! \ud83e\udd17\r\n\r\nYes, that was actually what I wanted to do next - I mean the steaming stuff :)\r\nAlso, I need to make some changes to the readme (to account for the updated features set).\r\n\r\nHopefully, I will be done by tomorrow afternoon if that's ok. \r\n","@lhoestq Hi Quentin!\r\n\r\nI've implemented (hopefully, correctly) the streaming compatibility but the problem with the current approach is that we first need to iterate over the full archive anyway to get the list of filenames for train and validation sets (see [this](https:\/\/github.com\/huggingface\/datasets\/pull\/3335\/files#diff-aeea540d136025e30a842856779e9c6485a5dc6fc9eb7fd6d3be2acd2f49b8e3R186), the same approach is implemented in TFDS version). Only after that, we can generate examples, so we cannot stream the dataset before the first iteration ends and it takes some time. It's probably not the most effective way. 
\r\n\r\nIf the streaming mode is turned off, this approach (with two iterations) is actually slower than the previous implementation (with archive extraction). \r\n\r\nMy suggestion is to host separate archives for each split prepared in advance. That way there would be no need for iterating over the common archive to collect train and validation filenames. @anton-l suggested to make AWS mirrors for them. I've prepared these archives, for now you can take a look at them [here](https:\/\/drive.google.com\/drive\/folders\/1oMrZHzPgHAKprKJuvih91CM8KMSzh_pL?usp=sharing). I simplified their structure a bit so if we switch to using them, the code then should be changed (and simplified) a bit too.\r\n","Hi ! Thanks for the changes :)\r\n\r\n> My suggestion is to host separate archives for each split prepared in advance. That way there would be no need for iterating over the common archive to collect train and validation filenames. @anton-l suggested to make AWS mirrors for them. I've prepared these archives, for now you can take a look at them here. I simplified their structure a bit so if we switch to using them, the code then should be changed (and simplified) a bit too.\r\n\r\nI agree, I just uploaded them on AWS\r\n\r\nhttps:\/\/s3.amazonaws.com\/datasets.huggingface.co\/SpeechCommands\/v0.01\/v0.01_test.tar.gz\r\nhttps:\/\/s3.amazonaws.com\/datasets.huggingface.co\/SpeechCommands\/v0.01\/v0.01_train.tar.gz\r\nhttps:\/\/s3.amazonaws.com\/datasets.huggingface.co\/SpeechCommands\/v0.01\/v0.01_validation.tar.gz\r\nhttps:\/\/s3.amazonaws.com\/datasets.huggingface.co\/SpeechCommands\/v0.02\/v0.02_test.tar.gz\r\nhttps:\/\/s3.amazonaws.com\/datasets.huggingface.co\/SpeechCommands\/v0.02\/v0.02_validation.tar.gz\r\n\r\nNote that in the future we can move those files to actual repositories on the Hugging Face Hub, since we are migrating the datasets from this repository to the Hugging Face Hub (as mirrors), to make them more accessible to the community.","@lhoestq Thank you! Gonna look at this tomorrow :)","@lhoestq I've modified the code to fit new data format, now it works for v0.01 but doesn't work for v0.02 as the training archive is missing. Could you please create a mirror for that one too? You can find it [here](https:\/\/drive.google.com\/file\/d\/1mPjnVMYb-VhPprGlOX8v9TBT1GT-rtcp\/view?usp=sharing)\r\n\r\nAnd when it's done I'll need to regenerate all the meta \/ dummy stuff, and this version will be ready for a review :)","Here you go :)\r\nhttps:\/\/s3.amazonaws.com\/datasets.huggingface.co\/SpeechCommands\/v0.02\/v0.02_train.tar.gz","FYI I juste merged a fix for the Windows CI error on `master`, feel free to merge `master` again into your branch","All green ! 
I had to fix some minor stuff in the CI but it's good now\r\n\r\nNext step is to mark it as ready for review, and I think it's all good so we can merge \ud83d\ude80 ","@lhoestq \ud83e\udd17",":tada: "],"created_at":1638193967000,"updated_at":1639132641000,"closed_at":1639132215000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"closes #3283","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3335\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3335\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3335","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3335","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3335.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3335.patch","merged_at":1639132215000},"is_pull_request":true}
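Editor's note: with the per-split archives discussed above, the dataset can be streamed without first iterating the full common archive. A minimal usage sketch; the dataset name and config names ("v0.01"/"v0.02") follow this thread but are an assumption about the final published script.

```python
# Stream the Speech Commands dataset: examples are yielded as the split
# archive is read, without downloading/extracting everything up front.
from datasets import load_dataset

ds = load_dataset("speech_commands", "v0.01", split="train", streaming=True)
print(next(iter(ds)))  # first example arrives without a full download
```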
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3334","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3334\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3334\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3334\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3334","id":1065983923,"node_id":"I_kwDODunzps4_iZ-z","number":3334,"title":"Integrate Polars library","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["If possible, a neat API could be something like `Dataset.to_polars()`, as well as `Dataset.set_format(\"polars\")`","Note they use a \"custom\" implementation of Arrow: [Arrow2](https:\/\/github.com\/jorgecarleitao\/arrow2)."],"created_at":1638189114000,"updated_at":1638190872000,"closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"body":"Check potential integration of the Polars library: https:\/\/github.com\/pola-rs\/polars\r\n- Benchmark: https:\/\/h2oai.github.io\/db-benchmark\/\r\n\r\nCC: @thomwolf @lewtun 
\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3334\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3334\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3333","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3333\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3333\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3333\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3333","id":1065346919,"node_id":"I_kwDODunzps4_f-dn","number":3333,"title":" load JSON files, get the errors","user":{"login":"yanllearnn","id":38966558,"node_id":"MDQ6VXNlcjM4OTY2NTU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38966558?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yanllearnn","html_url":"https:\/\/github.com\/yanllearnn","followers_url":"https:\/\/api.github.com\/users\/yanllearnn\/followers","following_url":"https:\/\/api.github.com\/users\/yanllearnn\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yanllearnn\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yanllearnn\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yanllearnn\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yanllearnn\/orgs","repos_url":"https:\/\/api.github.com\/users\/yanllearnn\/repos","events_url":"https:\/\/api.github.com\/users\/yanllearnn\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yanllearnn\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! The message you're getting is not an error. It simply says that your JSON dataset is being prepared to a location in `\/root\/.cache\/huggingface\/datasets`","> \r\n\r\nbut I want to load local JSON file by command\r\n`python3 run.py --do_train --task qa --dataset squad-retrain-data\/train-v2.0.json --output_dir .\/re_trained_model\/`\r\n\r\n**squad-retrain-data\/train-v2.0.json** is the local JSON file, how to load it and map it to a special structure?","You can load it with `dataset = datasets.load_dataset('json', data_files=args.dataset)` as you said.\r\nThen if you need to apply additional processing to map it to a special structure, you can use rename columns or use `dataset.map`. 
For more information, you can check the documentation here: https:\/\/huggingface.co\/docs\/datasets\/process.html\r\n\r\nAlso feel free to share your `run.py` code so we can take a look","```\r\n# Dataset selection\r\n if args.dataset.endswith('.json') or args.dataset.endswith('.jsonl'):\r\n dataset_id = None\r\n # Load from local json\/jsonl file\r\n dataset = datasets.load_dataset('json', data_files=args.dataset)\r\n # By default, the \"json\" dataset loader places all examples in the train split,\r\n # so if we want to use a jsonl file for evaluation we need to get the \"train\" split\r\n # from the loaded dataset\r\n eval_split = 'train'\r\n else:\r\n default_datasets = {'qa': ('squad',), 'nli': ('snli',)}\r\n dataset_id = tuple(args.dataset.split(':')) if args.dataset is not None else \\\r\n default_datasets[args.task]\r\n # MNLI has two validation splits (one with matched domains and one with mismatched domains). Most datasets just have one \"validation\" split\r\n eval_split = 'validation_matched' if dataset_id == ('glue', 'mnli') else 'validation'\r\n # Load the raw data\r\n dataset = datasets.load_dataset(*dataset_id)\r\n```\r\n\r\nI want to load JSON squad dataset instead `dataset = datasets.load_dataset('squad')` to retrain the model. \r\n","If your JSON has the same format as the SQuAD dataset, then you need to pass `field=\"data\"` to `load_dataset`, since the SQuAD format is one big JSON object in which the \"data\" field contains the list of questions and answers.\r\n```python\r\ndataset = datasets.load_dataset('json', data_files=args.dataset, field=\"data\")\r\n```\r\n\r\nLet me know if that helps :)\r\n\r\n","Yes, code works. but the format is not as expected.\r\n```\r\ndataset = datasets.load_dataset('json', data_files=args.dataset, field=\"data\")\r\n```\r\n```\r\npython3 run.py --do_train --task qa --dataset squad --output_dir .\/re_trained_model\/\r\n```\r\n************ train_dataset: Dataset({\r\n features: ['id', 'title', 'context', 'question', 'answers'],\r\n num_rows: 87599\r\n})\r\n\r\n\r\n```\r\npython3 run.py --do_train --task qa --dataset squad-retrain-data\/train-v2.0.json --output_dir .\/re_trained_model\/\r\n```\r\n************ train_dataset: Dataset({\r\n features: ['title', 'paragraphs'],\r\n num_rows: 442\r\n})\r\n\r\nI want the JSON to have the same format as before features. https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/squad_v2\/squad_v2.py is the script dealing with **squad** but how can I apply it by using JSON? ","Ok I see, you have the paragraphs so you just need to process them to extract the questions and answers. 
I think you can process the SQuAD-like data this way:\r\n```python\r\ndef process_squad(articles):\r\n out = {\r\n \"title\": [],\r\n \"context\": [],\r\n \"question\": [],\r\n \"id\": [],\r\n \"answers\": [],\r\n }\r\n for title, paragraphs in zip(articles[\"title\"], articles[\"paragraphs\"]):\r\n for paragraph in paragraphs:\r\n for qa in paragraph[\"qas\"]:\r\n out[\"title\"].append(title)\r\n out[\"context\"].append(paragraph[\"context\"])\r\n out[\"question\"].append(qa[\"question\"])\r\n out[\"id\"].append(qa[\"id\"])\r\n out[\"answers\"].append({\r\n \"answer_start\": [answer[\"answer_start\"] for answer in qa[\"answers\"]],\r\n \"text\": [answer[\"text\"] for answer in qa[\"answers\"]],\r\n })\r\n return out\r\n\r\ndataset = dataset.map(process_squad, batched=True, remove_columns=[\"paragraphs\"])\r\n```\r\n\r\nI adapted the code from [squad.py](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/squad\/squad.py). The code takes as input a batch of articles (title + paragraphs) and gets all the questions and answers from the JSON structure.\r\n\r\nThe output is a dataset with `features: ['answers', 'context', 'id', 'question', 'title']`\r\n\r\nLet me know if that helps !\r\n","Yes, this works. But how to get the training output during training the squad by **Trainer** \r\nfor example https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/pytorch\/question-answering\/trainer_qa.py \r\nI want the training inputs, labels, outputs for every epoch and step to produce the training dynamic graph","I think you may need to implement your own Trainer, from the `QuestionAnsweringTrainer` for example.\r\nThis way you can have the flexibility of saving all the inputs\/output used at each step","does there have any function to be overwritten to do this?","> does there have any function to be overwritten to do this?\r\n\r\nok, I overwrote the compute_loss, thank you.","Hi, I add one field **example_id**, but I can't see it in the **comput_loss** function, how can I do this? 
below is the information of inputs\r\n\r\n```\r\n*********************** inputs: {'attention_mask': tensor([[1, 1, 1, ..., 0, 0, 0],\r\n [1, 1, 1, ..., 0, 0, 0],\r\n [1, 1, 1, ..., 0, 0, 0],\r\n ...,\r\n [1, 1, 1, ..., 0, 0, 0],\r\n [1, 1, 1, ..., 0, 0, 0],\r\n [1, 1, 1, ..., 0, 0, 0]], device='cuda:0'), 'end_positions': tensor([ 25, 97, 93, 44, 25, 112, 109, 134], device='cuda:0'), 'input_ids': tensor([[ 101, 2054, 2390, ..., 0, 0, 0],\r\n [ 101, 2054, 2515, ..., 0, 0, 0],\r\n [ 101, 2054, 2106, ..., 0, 0, 0],\r\n ...,\r\n [ 101, 2339, 2001, ..., 0, 0, 0],\r\n [ 101, 2054, 2515, ..., 0, 0, 0],\r\n [ 101, 2054, 2003, ..., 0, 0, 0]], device='cuda:0'), 'start_positions': tensor([ 20, 90, 89, 41, 25, 96, 106, 132], device='cuda:0'), 'token_type_ids': tensor([[0, 0, 0, ..., 0, 0, 0],\r\n [0, 0, 0, ..., 0, 0, 0],\r\n [0, 0, 0, ..., 0, 0, 0],\r\n ...,\r\n [0, 0, 0, ..., 0, 0, 0],\r\n [0, 0, 0, ..., 0, 0, 0],\r\n [0, 0, 0, ..., 0, 0, 0]], device='cuda:0')} \r\n```\r\n\r\n```\r\n# This function preprocesses a question answering dataset, tokenizing the question and context text\r\n# and finding the right offsets for the answer spans in the tokenized context (to use as labels).\r\n# Adapted from https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/pytorch\/question-answering\/run_qa.py\r\ndef prepare_train_dataset_qa(examples, tokenizer, max_seq_length=None):\r\n questions = [q.lstrip() for q in examples[\"question\"]]\r\n max_seq_length = tokenizer.model_max_length\r\n # tokenize both questions and the corresponding context\r\n # if the context length is longer than max_length, we split it to several\r\n # chunks of max_length\r\n tokenized_examples = tokenizer(\r\n questions,\r\n examples[\"context\"],\r\n truncation=\"only_second\",\r\n max_length=max_seq_length,\r\n stride=min(max_seq_length \/\/ 2, 128),\r\n return_overflowing_tokens=True,\r\n return_offsets_mapping=True,\r\n padding=\"max_length\"\r\n )\r\n\r\n # Since one example might give us several features if it has a long context,\r\n # we need a map from a feature to its corresponding example.\r\n sample_mapping = tokenized_examples.pop(\"overflow_to_sample_mapping\")\r\n # The offset mappings will give us a map from token to character position\r\n # in the original context. 
This will help us compute the start_positions\r\n # and end_positions to get the final answer string.\r\n offset_mapping = tokenized_examples.pop(\"offset_mapping\")\r\n\r\n tokenized_examples[\"start_positions\"] = []\r\n tokenized_examples[\"end_positions\"] = []\r\n\r\n tokenized_examples[\"example_id\"] = []\r\n\r\n for i, offsets in enumerate(offset_mapping):\r\n input_ids = tokenized_examples[\"input_ids\"][i]\r\n # We will label features not containing the answer the index of the CLS token.\r\n cls_index = input_ids.index(tokenizer.cls_token_id)\r\n sequence_ids = tokenized_examples.sequence_ids(i)\r\n # from the feature idx to sample idx\r\n sample_index = sample_mapping[i]\r\n # get the answer for a feature\r\n answers = examples[\"answers\"][sample_index]\r\n\r\n tokenized_examples[\"example_id\"].append(examples[\"id\"][sample_index])\r\n\r\n if len(answers[\"answer_start\"]) == 0:\r\n tokenized_examples[\"start_positions\"].append(cls_index)\r\n tokenized_examples[\"end_positions\"].append(cls_index)\r\n else:\r\n # Start\/end character index of the answer in the text.\r\n start_char = answers[\"answer_start\"][0]\r\n end_char = start_char + len(answers[\"text\"][0])\r\n\r\n # Start token index of the current span in the text.\r\n token_start_index = 0\r\n while sequence_ids[token_start_index] != 1:\r\n token_start_index += 1\r\n\r\n # End token index of the current span in the text.\r\n token_end_index = len(input_ids) - 1\r\n while sequence_ids[token_end_index] != 1:\r\n token_end_index -= 1\r\n\r\n # Detect if the answer is out of the span (in which case this feature is labeled with the CLS index).\r\n if not (offsets[token_start_index][0] <= start_char and\r\n offsets[token_end_index][1] >= end_char):\r\n tokenized_examples[\"start_positions\"].append(cls_index)\r\n tokenized_examples[\"end_positions\"].append(cls_index)\r\n else:\r\n # Otherwise move the token_start_index and token_end_index to the two ends of the answer.\r\n # Note: we could go after the last offset if the answer is the last word (edge case).\r\n while token_start_index < len(offsets) and \\\r\n offsets[token_start_index][0] <= start_char:\r\n token_start_index += 1\r\n tokenized_examples[\"start_positions\"].append(\r\n token_start_index - 1)\r\n while offsets[token_end_index][1] >= end_char:\r\n token_end_index -= 1\r\n tokenized_examples[\"end_positions\"].append(token_end_index + 1)\r\n\r\n return tokenized_examples\r\n```"],"created_at":1638109798000,"updated_at":1638351271000,"closed_at":1638331068000,"author_association":"NONE","active_lock_reason":null,"body":"Hi, does this bug be fixed? 
when I load JSON files, I get the same errors by the command \r\n`!python3 run.py --do_train --task qa --dataset squad-retrain-data\/train-v2.0.json --output_dir .\/re_trained_model\/`\r\n\r\nchange the dateset to load json by refering to https:\/\/huggingface.co\/docs\/datasets\/loading.html\r\n`dataset = datasets.load_dataset('json', data_files=args.dataset)`\r\n\r\nErrors:\r\n`Downloading and preparing dataset json\/default (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/root\/.cache\/huggingface\/datasets\/json\/default-c1e124ad488911b8\/0.0.0\/45636811569ec4a6630521c18235dfbbab83b7ab572e3393c5ba68ccabe98264...\r\n`\r\n\r\n_Originally posted by @yanllearnn in https:\/\/github.com\/huggingface\/datasets\/issues\/730#issuecomment-981095050_","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3333\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3333\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
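Editor's note: the "override compute_loss" approach discussed in this thread can be sketched as below. This is an illustration, not the thread author's actual code; note that the reason **example_id** never shows up in `inputs` is that the `Trainer` drops columns that are not model arguments by default, so `remove_unused_columns=False` must be passed in `TrainingArguments` (and the extra field popped before the forward pass).

```python
# Sketch: subclass Trainer and override compute_loss to record per-step data
# for a training-dynamics plot. Requires TrainingArguments(remove_unused_columns=False)
# so that "example_id" actually reaches compute_loss.
from transformers import Trainer

class LoggingTrainer(Trainer):
    def compute_loss(self, model, inputs, return_outputs=False):
        example_ids = inputs.pop("example_id", None)  # not a model argument
        outputs = model(**inputs)
        loss = outputs.loss
        # stash whatever the training-dynamics graph needs at this step
        self.state.log_history.append(
            {"step": self.state.global_step, "loss": loss.item()}
        )
        return (loss, outputs) if return_outputs else loss
```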
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3332","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3332\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3332\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3332\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3332","id":1065345853,"node_id":"PR_kwDODunzps4vGBig","number":3332,"title":"Fix error message and add extension fallback","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1638109529000,"updated_at":1638192855000,"closed_at":1638192854000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Fix the error message raised if `infered_module_name` is `None` in `CommunityDatasetModuleFactoryWithoutScript.get_module` and make `infer_module_for_data_files` more robust. \r\n\r\nIn the linked issue, `infer_module_for_data_files` returns `None` because `json` is the second most common extension due to the suffix ordering. 
Now, we go from the most common to the least common extension and try to map it or return `None`.\r\n\r\nFix #3331","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3332\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3332\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3332","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3332","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3332.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3332.patch","merged_at":1638192854000},"is_pull_request":true}
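A rough sketch of the fallback this PR describes, with illustrative names only (the mapping table here is an assumed subset, not the library's actual one): count the extensions of the data files, then walk them from most to least common and return the first one that maps to a known module.

```python
from collections import Counter

# Assumed subset of the real extension-to-module table, for illustration only.
_EXTENSION_TO_MODULE = {"csv": "csv", "json": "json", "txt": "text"}

def infer_module_for_data_files(data_files):
    # Count extensions, then try to map them from most common to least common.
    extensions = Counter(
        f.rsplit(".", 1)[-1].lower() for f in data_files if "." in f
    )
    for ext, _count in extensions.most_common():
        if ext in _EXTENSION_TO_MODULE:
            return _EXTENSION_TO_MODULE[ext]
    return None  # nothing recognized; the caller raises a clear error message

print(infer_module_for_data_files(["a.json", "b.json", "README.txt"]))  # json
```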
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3331","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3331\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3331\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3331\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3331","id":1065275896,"node_id":"I_kwDODunzps4_ftH4","number":3331,"title":"AttributeError: 'CommunityDatasetModuleFactoryWithoutScript' object has no attribute 'path'","user":{"login":"luozhouyang","id":34032031,"node_id":"MDQ6VXNlcjM0MDMyMDMx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34032031?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/luozhouyang","html_url":"https:\/\/github.com\/luozhouyang","followers_url":"https:\/\/api.github.com\/users\/luozhouyang\/followers","following_url":"https:\/\/api.github.com\/users\/luozhouyang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/luozhouyang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/luozhouyang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/luozhouyang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/luozhouyang\/orgs","repos_url":"https:\/\/api.github.com\/users\/luozhouyang\/repos","events_url":"https:\/\/api.github.com\/users\/luozhouyang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/luozhouyang\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi,\r\n\r\nthe fix was merged and will be available in the next release of `datasets`.\r\nIn the meantime, you can use it by installing `datasets` directly from master as follows:\r\n```\r\npip install git+https:\/\/github.com\/huggingface\/datasets.git\r\n```"],"created_at":1638089645000,"updated_at":1638193784000,"closed_at":1638192854000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nI add a new question answering dataset to huggingface datasets manually. 
Here is the link: [luozhouyang\/question-answering-datasets](https:\/\/huggingface.co\/datasets\/luozhouyang\/question-answering-datasets)\r\n\r\nBut when I load the dataset, an error is raised: \r\n\r\n```bash\r\nAttributeError: 'CommunityDatasetModuleFactoryWithoutScript' object has no attribute 'path'\r\n```\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndataset = load_dataset(\"luozhouyang\/question-answering-datasets\", data_files=[\"dureader_robust.train.json\"])\r\n```\r\n\r\n## Expected results\r\nLoad dataset successfully without any error.\r\n\r\n## Actual results\r\n```bash\r\nTraceback (most recent call last):\r\n File \"\/mnt\/home\/zhouyang.lzy\/github\/naivenlp\/naivenlp\/tests\/question_answering_tests\/dataset_test.py\", line 89, in test_load_dataset_with_hf\r\n data_files=[\"dureader_robust.train.json\"],\r\n File \"\/mnt\/home\/zhouyang.lzy\/.conda\/envs\/naivenlp\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 1616, in load_dataset\r\n **config_kwargs,\r\n File \"\/mnt\/home\/zhouyang.lzy\/.conda\/envs\/naivenlp\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 1443, in load_dataset_builder\r\n path, revision=revision, download_config=download_config, download_mode=download_mode, data_files=data_files\r\n File \"\/mnt\/home\/zhouyang.lzy\/.conda\/envs\/naivenlp\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 1157, in dataset_module_factory\r\n raise e1 from None\r\n File \"\/mnt\/home\/zhouyang.lzy\/.conda\/envs\/naivenlp\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 1144, in dataset_module_factory\r\n download_mode=download_mode,\r\n File \"\/mnt\/home\/zhouyang.lzy\/.conda\/envs\/naivenlp\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 798, in get_module\r\n raise FileNotFoundError(f\"No data files or dataset script found in {self.path}\")\r\nAttributeError: 'CommunityDatasetModuleFactoryWithoutScript' object has no attribute 'path'\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.1\r\n- Platform: linux\r\n- Python version: 3.6.13\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3331\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3331\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
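A hedged reconstruction of the crash, simplified to its essentials (the `name` attribute is an assumption about the factory's real field; only the `path` reference is confirmed by the traceback): the error-formatting line itself referenced a non-existent attribute, so users saw an AttributeError instead of the intended FileNotFoundError.

```python
class CommunityDatasetModuleFactoryWithoutScript:
    """Stripped-down stand-in for the real factory class."""

    def __init__(self, name: str):
        self.name = name  # assumed attribute; e.g. "luozhouyang/question-answering-datasets"

    def get_module(self):
        # Before the fix the f-string read "... found in {self.path}", and the
        # attribute lookup raised AttributeError before the message was built.
        raise FileNotFoundError(f"No data files or dataset script found in {self.name}")

try:
    CommunityDatasetModuleFactoryWithoutScript("luozhouyang/question-answering-datasets").get_module()
except FileNotFoundError as err:
    print(err)
```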
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3330","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3330\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3330\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3330\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3330","id":1065176619,"node_id":"PR_kwDODunzps4vFtF7","number":3330,"title":"Change TriviaQA license (#3313)","user":{"login":"avinashsai","id":22453634,"node_id":"MDQ6VXNlcjIyNDUzNjM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22453634?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/avinashsai","html_url":"https:\/\/github.com\/avinashsai","followers_url":"https:\/\/api.github.com\/users\/avinashsai\/followers","following_url":"https:\/\/api.github.com\/users\/avinashsai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/avinashsai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/avinashsai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/avinashsai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/avinashsai\/orgs","repos_url":"https:\/\/api.github.com\/users\/avinashsai\/repos","events_url":"https:\/\/api.github.com\/users\/avinashsai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/avinashsai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1638070005000,"updated_at":1638185061000,"closed_at":1638185061000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Fixes (#3313)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3330\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3330\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3330","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3330","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3330.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3330.patch","merged_at":1638185061000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3329","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3329\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3329\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3329\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3329","id":1065096971,"node_id":"I_kwDODunzps4_fBcL","number":3329,"title":"Map function: Type error on iter #999","user":{"login":"josephkready666","id":52659318,"node_id":"MDQ6VXNlcjUyNjU5MzE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52659318?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/josephkready666","html_url":"https:\/\/github.com\/josephkready666","followers_url":"https:\/\/api.github.com\/users\/josephkready666\/followers","following_url":"https:\/\/api.github.com\/users\/josephkready666\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/josephkready666\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/josephkready666\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/josephkready666\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/josephkready666\/orgs","repos_url":"https:\/\/api.github.com\/users\/josephkready666\/repos","events_url":"https:\/\/api.github.com\/users\/josephkready666\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/josephkready666\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi, thanks for reporting.\r\n\r\nIt would be really helpful if you could provide the actual code of the `text_numbers_to_int` function so we can reproduce the error.","```\r\ndef text_numbers_to_int(text, column=\"\"):\r\n \"\"\"\r\n Convert text numbers to int.\r\n\r\n :param text: text numbers\r\n :return: int\r\n \"\"\"\r\n try:\r\n numbers = find_numbers(text)\r\n if not numbers:\r\n return text\r\n result = \"\"\r\n i, j = 0, 0\r\n while i < len(text):\r\n if j < len(numbers) and i == numbers[j][1]:\r\n n = int(numbers[j][0]) if numbers[j][0] % 1 == 0 else float(numbers[j][0])\r\n result += str(n)\r\n i = numbers[j][2] #end\r\n j += 1\r\n else:\r\n result += text[i]\r\n i += 1\r\n if column:\r\n return{column: result}\r\n else:\r\n return {column: 
result}\r\n except Exception as e:\r\n print(e)\r\n return {column: result}\r\n```","Maybe this is because of the `return text` line? I think it should return a dictionary rather than a string","Yes that was it, good catch! Thanks"],"created_at":1638035585000,"updated_at":1638218415000,"closed_at":1638218415000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nUsing the map function, it throws a type error on iter #999\r\n\r\nHere is the code I am calling:\r\n```\r\ndataset = datasets.load_dataset('squad')\r\ndataset['validation'].map(text_numbers_to_int, input_columns=['context'], fn_kwargs={'column': 'context'})\r\n``` \r\ntext_numbers_to_int returns the input text with numbers replaced in the format {'context': text}\r\n\r\nIt happens at \r\n`\r\nFile \"C:\\Users\\lonek\\anaconda3\\envs\\ai\\Lib\\site-packages\\datasets\\arrow_writer.py\", line 289, in \r\n [row[0][col] for row in self.current_examples], type=col_type, try_type=col_try_type, col=col\r\n`\r\n\r\nThe issue is that the list comprehension expects self.current_examples to be type tuple(dict, str), but for some reason 26 out of 1000 of the self.current_examples are type tuple(str, str)\r\n\r\nHere is an example of what self.current_examples should be\r\n({'context': 'Super Bowl 50 was an...merals 50.'}, '')\r\n\r\nHere is an example of what self.current_examples are when it throws the error:\r\n('The Panthers used th... Marriott.', '')\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3329\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3329\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
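The fix the maintainer spotted, shown as a simplified sketch (the number-finding logic is reimplemented with a plain regex; the reporter's `find_numbers` helper is not reproduced): every code path must return a dict, because the arrow writer pairs each mapped example with its key and expects `tuple(dict, str)`. The early `return text` returned a bare string, which is exactly what produced the `tuple(str, str)` rows.

```python
import re

def text_numbers_to_int(text, column="context"):
    """Replace float-looking numbers with ints where possible, always returning a dict."""
    def normalize(match):
        value = float(match.group())
        return str(int(value)) if value.is_integer() else str(value)

    result = re.sub(r"\d+(?:\.\d+)?", normalize, text)
    return {column: result}  # never a bare string, even when nothing matched

print(text_numbers_to_int("The Panthers scored 50.0 points"))
# {'context': 'The Panthers scored 50 points'}
```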
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3328","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3328\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3328\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3328\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3328","id":1065015262,"node_id":"PR_kwDODunzps4vFTpW","number":3328,"title":"Quick fix error formatting","user":{"login":"NouamaneTazi","id":29777165,"node_id":"MDQ6VXNlcjI5Nzc3MTY1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29777165?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NouamaneTazi","html_url":"https:\/\/github.com\/NouamaneTazi","followers_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/followers","following_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/orgs","repos_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/repos","events_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1638013668000,"updated_at":1638192762000,"closed_at":1638192762000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"While working on a dataset, I got the error\r\n```\r\nTypeError: Provided `function` which is applied to all elements of table returns a `dict` of types {[type(x) for x in processed_inputs.values()]}. 
When using `batched=True`, make sure provided `function` returns a `dict` of types like `{allowed_batch_return_types}`.\r\n```\r\n\r\nThis PR should fix the formatting of this error message.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3328\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3328\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3328","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3328","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3328.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3328.patch","merged_at":1638192762000},"is_pull_request":true}
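The quoted message prints the Python expressions literally because the string was missing its `f` prefix. A sketch of the nature of the fix (the variables here are stand-ins, not the library's actual locals):

```python
processed_inputs = {"col": ["a", "b"]}
allowed_batch_return_types = (dict,)

# Adding the `f` prefix makes the braces interpolate instead of printing verbatim.
message = (
    f"Provided `function` which is applied to all elements of table returns a "
    f"`dict` of types {[type(x) for x in processed_inputs.values()]}. When using "
    f"`batched=True`, make sure provided `function` returns a `dict` of types "
    f"like `{allowed_batch_return_types}`."
)
print(message)
```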
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3327","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3327\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3327\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3327\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3327","id":1064675888,"node_id":"I_kwDODunzps4_daow","number":3327,"title":"\"Shape of query is incorrect, it has to be either a 1D array or 2D (1, N)\"","user":{"login":"eliasws","id":19492473,"node_id":"MDQ6VXNlcjE5NDkyNDcz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19492473?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eliasws","html_url":"https:\/\/github.com\/eliasws","followers_url":"https:\/\/api.github.com\/users\/eliasws\/followers","following_url":"https:\/\/api.github.com\/users\/eliasws\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eliasws\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eliasws\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eliasws\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eliasws\/orgs","repos_url":"https:\/\/api.github.com\/users\/eliasws\/repos","events_url":"https:\/\/api.github.com\/users\/eliasws\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eliasws\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["#3323 "],"created_at":1637943996000,"updated_at":1637945051000,"closed_at":1637945051000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\nPassing a correctly shaped Numpy-Array to get_nearest_examples leads to the Exception\r\n\r\n\"Shape of query is incorrect, it has to be either a 1D array or 2D (1, N)\"\r\n\r\nProbably the reason for this is a wrongly converted assertion.\r\n\r\n1.15.1:\r\n\r\n`assert len(query.shape) == 1 or (len(query.shape) == 2 and query.shape[0] == 1)`\r\n\r\n1.16.1:\r\n\r\n```\r\n if len(query.shape) != 1 or (len(query.shape) == 2 and query.shape[0] != 1):\r\n raise ValueError(\"Shape of query is incorrect, it has to be either a 1D array or 2D (1, N)\")\r\n```\r\n\r\n## Steps to reproduce the bug\r\n\r\nfollow the steps described here: 
https:\/\/huggingface.co\/course\/chapter5\/6?fw=tf\r\n\r\n```python\r\n question_embedding.shape # (1, 768)\r\n\r\n scores, samples = embeddings_dataset.get_nearest_examples(\r\n \"embeddings\", question_embedding, k=5 # Error\r\n)\r\n\r\n# \"Shape of query is incorrect, it has to be either a 1D array or 2D (1, N)\"\r\n```\r\n\r\n## Expected results\r\nShould work without exception\r\n\r\n## Actual results\r\nThrows exception\r\n\r\n## Environment info\r\n- `datasets` version: 1.15.1\r\n- Platform: Darwin-20.6.0-x86_64-i386-64bit\r\n- Python version: 3.7.12\r\n- PyArrow version: 6.0.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3327\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3327\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
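Negating the 1.15.1 assertion with De Morgan's laws swaps `or` for `and` and negates each clause; the 1.16.1 code instead flipped the comparisons while keeping the structure. A sketch of the corrected check (written as a standalone function for illustration; in the library this guard lives inside the search code):

```python
import numpy as np

def validate_query(query: np.ndarray) -> None:
    # not (len == 1 or (len == 2 and shape[0] == 1))
    # == len != 1 and (len != 2 or shape[0] != 1)
    if len(query.shape) != 1 and (len(query.shape) != 2 or query.shape[0] != 1):
        raise ValueError("Shape of query is incorrect, it has to be either a 1D array or 2D (1, N)")

validate_query(np.zeros(768))       # 1D: accepted
validate_query(np.zeros((1, 768)))  # 2D (1, N): accepted, as in the course example
# validate_query(np.zeros((5, 768)))  # would raise ValueError
```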
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3326","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3326\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3326\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3326\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3326","id":1064664479,"node_id":"PR_kwDODunzps4vEaYG","number":3326,"title":"Fix import `datasets` on python 3.10","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637943000000,"updated_at":1637944283000,"closed_at":1637944283000,"author_association":"MEMBER","active_lock_reason":null,"body":"In python 3.10 it's no longer possible to use `functools.wraps` on a method decorated with `classmethod`.\r\nTo fix this I inverted the order of the `inject_arrow_table_documentation` and `classmethod` decorators\r\n\r\nFix #3324 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3326\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3326\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3326","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3326","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3326.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3326.patch","merged_at":1637944283000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3325","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3325\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3325\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3325\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3325","id":1064663075,"node_id":"PR_kwDODunzps4vEaGO","number":3325,"title":"Update conda dependencies","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637942887000,"updated_at":1637943637000,"closed_at":1637943636000,"author_association":"MEMBER","active_lock_reason":null,"body":"Some dependencies minimum versions were outdated. For example `pyarrow` and `huggingface_hub`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3325\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3325\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3325","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3325","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3325.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3325.patch","merged_at":1637943636000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3324","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3324\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3324\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3324\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3324","id":1064661212,"node_id":"I_kwDODunzps4_dXDc","number":3324,"title":"Can't import `datasets` in python 3.10","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_ad
min":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637942774000,"updated_at":1637944283000,"closed_at":1637944283000,"author_association":"MEMBER","active_lock_reason":null,"body":"When importing `datasets` I'm getting this error in python 3.10:\r\n```python\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/__init__.py\", line 34, in \r\n from .arrow_dataset import Dataset, concatenate_datasets\r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/arrow_dataset.py\", line 47, in \r\n from .arrow_reader import ArrowReader\r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/arrow_reader.py\", line 33, in \r\n from .table import InMemoryTable, MemoryMappedTable, Table, concat_tables\r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/table.py\", line 334, in \r\n class InMemoryTable(TableBlock):\r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/table.py\", line 361, in InMemoryTable\r\n def from_pandas(cls, *args, **kwargs):\r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/table.py\", line 24, in wrapper\r\n out = wraps(arrow_table_method)(method)\r\n File \"\/Users\/quentinlhoest\/.pyenv\/versions\/3.10.0\/lib\/python3.10\/functools.py\", line 61, in update_wrapper\r\n wrapper.__wrapped__ = wrapped\r\nAttributeError: readonly attribute\r\n```\r\n\r\nThis makes the conda build fail.\r\nI'm opening a PR to fix this and do a patch release 1.16.1","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3324\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3324\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3323","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3323\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3323\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3323\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3323","id":1064660452,"node_id":"PR_kwDODunzps4vEZwq","number":3323,"title":"Fix wrongly converted assert","user":{"login":"eliasws","id":19492473,"node_id":"MDQ6VXNlcjE5NDkyNDcz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19492473?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eliasws","html_url":"https:\/\/github.com\/eliasws","followers_url":"https:\/\/api.github.com\/users\/eliasws\/followers","following_url":"https:\/\/api.github.com\/users\/eliasws\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eliasws\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eliasws\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eliasws\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eliasws\/orgs","repos_url":"https:\/\/api.github.com\/users\/eliasws\/repos","events_url":"https:\/\/api.github.com\/users\/eliasws\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eliasws\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Closes #3327 "],"created_at":1637942739000,"updated_at":1637945052000,"closed_at":1637945051000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Seems like this assertion was replaced by an exception but the condition got wrongly converted.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3323\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3323\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3323","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3323","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3323.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3323.patch","merged_at":1637945051000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3322","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3322\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3322\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3322\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3322","id":1064429705,"node_id":"PR_kwDODunzps4vD1Ct","number":3322,"title":"Add missing tags to XTREME","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637930225000,"updated_at":1638193207000,"closed_at":1638193206000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Add missing tags to the XTREME benchmark for better discoverability.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3322\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3322\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3322","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3322","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3322.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3322.patch","merged_at":1638193206000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3321","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3321\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3321\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3321\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3321","id":1063858386,"node_id":"PR_kwDODunzps4vCBeI","number":3321,"title":"Update URL of tatoeba subset of xtreme","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["To be more precise: `os.path.join` is replaced on-the-fly by `xjoin` anyway with patching, to extend it to remote files<\/s>","Oh actually just ignore what I said: they were used to concatenate URLs, which is not recommended. Let me fix that again by appending using `+`"],"created_at":1637865751000,"updated_at":1637922630000,"closed_at":1637922630000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Updates the URL of the tatoeba subset of xtreme. 
Additionally, replaces `os.path.join` with `xjoin` to correctly join the URL segments on Windows.\r\n\r\nFix #3320 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3321\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3321\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3321","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3321","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3321.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3321.patch","merged_at":1637922629000},"is_pull_request":true}
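Why `os.path.join` is the wrong tool here (a hedged illustration, not the PR diff): on Windows it joins path segments with backslashes, which silently corrupts a URL, so plain concatenation is the safe choice.

```python
import os
import posixpath

base = "https://github.com/facebookresearch/LASER/raw/main/data/tatoeba/v1"
filename = "tatoeba.rus-eng.rus"

bad = os.path.join(base, filename)          # "...v1\tatoeba.rus-eng.rus" on Windows
good = base + "/" + filename                # identical on every platform
also_good = posixpath.join(base, filename)  # always forward slashes
print(good == also_good)  # True
```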
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3320","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3320\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3320\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3320\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3320","id":1063531992,"node_id":"I_kwDODunzps4_ZDXY","number":3320,"title":"Can't get tatoeba.rus dataset","user":{"login":"mmg10","id":65535131,"node_id":"MDQ6VXNlcjY1NTM1MTMx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/65535131?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mmg10","html_url":"https:\/\/github.com\/mmg10","followers_url":"https:\/\/api.github.com\/users\/mmg10\/followers","following_url":"https:\/\/api.github.com\/users\/mmg10\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mmg10\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mmg10\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mmg10\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mmg10\/orgs","repos_url":"https:\/\/api.github.com\/users\/mmg10\/repos","events_url":"https:\/\/api.github.com\/users\/mmg10\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mmg10\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637843471000,"updated_at":1637922629000,"closed_at":1637922629000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nIt gives an error.\r\n\r\n> FileNotFoundError: Couldn't find file at https:\/\/github.com\/facebookresearch\/LASER\/raw\/master\/data\/tatoeba\/v1\/tatoeba.rus-eng.rus\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndata=load_dataset(\"xtreme\",\"tatoeba.rus\", split=\"validation\")\r\n```\r\n\r\n## Solution\r\nThe library tries to access the **master** branch. 
In the facebookresearch GitHub repo, it is in the **main** branch.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3320\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3320\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3319","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3319\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3319\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3319\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3319","id":1062749654,"node_id":"PR_kwDODunzps4u-xdv","number":3319,"title":"Add push_to_hub docs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Looks good to me! :)\r\n\r\nMaybe we can mention that users can also set the `private` argument if they want to keep their dataset private? 
It would lead nicely into the next section on Privacy.","Thanks for your comments, I fixed the capitalization for consistency and added a passage to mention the `private` parameter and to have a nice transition to the Privacy section :)\r\n\r\nI also added the login instruction that was missing, which the user needs before they can actually upload a dataset."],"created_at":1637778071000,"updated_at":1637851666000,"closed_at":1637851666000,"author_association":"MEMBER","active_lock_reason":null,"body":"Since #3098 it's now possible to upload a dataset on the Hub directly from Python using the `push_to_hub` method.\r\nI just added a section in the \"Upload a dataset to the Hub\" tutorial.\r\n\r\nI kept the section quite simple but let me know if it sounds good to you @LysandreJik @stevhliu :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3319\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3319\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3319","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3319","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3319.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3319.patch","merged_at":1637851666000},"is_pull_request":true}
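A short sketch of what the new tutorial section covers (the repo id and file name here are hypothetical placeholders): after logging in with `huggingface-cli login`, a dataset can be uploaded straight from Python, and the `private` flag added during review keeps the repo private.

```python
from datasets import load_dataset

# Any local dataset works; "my_data.json" is a placeholder file name.
dataset = load_dataset("json", data_files="my_data.json")

# "my-username/my-dataset" is a hypothetical repo id on the Hub.
dataset.push_to_hub("my-username/my-dataset", private=True)
```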
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3318","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3318\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3318\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3318\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3318","id":1062369717,"node_id":"PR_kwDODunzps4u9m-k","number":3318,"title":"Finish transition to PyArrow 3.0.0","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637757014000,"updated_at":1637768105000,"closed_at":1637768104000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Finish transition to PyArrow 3.0.0 that was started in #3098.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3318\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3318\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3318","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3318","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3318.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3318.patch","merged_at":1637768104000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3317","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3317\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3317\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3317\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3317","id":1062284447,"node_id":"I_kwDODunzps4_USyf","number":3317,"title":"Add desc parameter to Dataset filter method","user":{"login":"vblagoje","id":458335,"node_id":"MDQ6VXNlcjQ1ODMzNQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/458335?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vblagoje","html_url":"https:\/\/github.com\/vblagoje","followers_url":"https:\/\/api.github.com\/users\/vblagoje\/followers","following_url":"https:\/\/api.github.com\/users\/vblagoje\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vblagoje\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vblagoje\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vblagoje\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vblagoje\/orgs","repos_url":"https:\/\/api.github.com\/users\/vblagoje\/repos","events_url":"https:\/\/api.github.com\/users\/vblagoje\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vblagoje\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi,\r\n\r\n`Dataset.map` allows more generic transforms compared to `Dataset.filter`, which purpose is very specific (to filter examples based on a condition). That's why I don't think we need the `desc` parameter there for consistency. #3196 has added descriptions to the `Dataset` methods that call `.map` internally, but not for the `filter` method, so we should do that.\r\n\r\nDo you have a description in mind? Maybe `\"Filtering the dataset\"` or `\"Filtering the indices\"`? If yes, feel free to open a PR.","I'm personally ok with adding the `desc` parameter actually. Let's say you have different filters, it can be nice to differentiate between the different filters when they're running no ?","@mariosasko the use case is filtering of a dataset prior to tokenization and subsequent training. 
As the dataset is huge, it's just a matter of giving a user (model trainer) some feedback on what's going on. Otherwise, feedback is given for all steps in training preparation but not for filtering, and the filtering in my use case lasts about 4-5 minutes. And yes, if there are more filtering stages, as @lhoestq pointed out, it would be nice to give some feedback. I thought `desc` was there already and got confused when I got the script error. ","I don't have a strong opinion on that, so having `desc` as a parameter is also OK."],"created_at":1637751696000,"updated_at":1641407484000,"closed_at":1641407484000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nAs I was filtering very large datasets, I noticed the filter method doesn't have the `desc` parameter, which is available in the `map` method. Why don't we add a `desc` parameter to the filter method, both for consistency and because it's nice to give some feedback to users during long operations on Datasets?\r\n\r\n**Describe the solution you'd like**\r\nAdd a `desc` parameter to the Dataset `filter` method.\r\n\r\n**Describe alternatives you've considered**\r\nN\/A\r\n\r\n**Additional context**\r\nN\/A\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3317\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3317\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
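The request above was eventually implemented; a minimal sketch of how it reads, assuming a `datasets` version in which `Dataset.filter` accepts `desc`:

```python
from datasets import Dataset

dataset = Dataset.from_dict({"text": ["short", "a much longer example text"]})

# `desc` labels the tqdm progress bar, which helps tell successive
# long-running filters apart, as discussed in the issue above.
kept = dataset.filter(lambda ex: len(ex["text"]) > 10, desc="Dropping short texts")
print(len(kept))
```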
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3316","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3316\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3316\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3316\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3316","id":1062185822,"node_id":"I_kwDODunzps4_T6te","number":3316,"title":"Add RedCaps dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision 
datasets"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637745782000,"updated_at":1641996795000,"closed_at":1641996795000,"author_association":"MEMBER","active_lock_reason":null,"body":"## Adding a Dataset\r\n- **Name:** RedCaps\r\n- **Description:** Web-curated image-text data created by the people, for the people\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2111.11431\r\n- **Data:** https:\/\/redcaps.xyz\/\r\n- **Motivation:** Multimodal image-text dataset: 12M+ Image-text pairs \r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nProposed by @patil-suraj 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3316\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3316\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3315","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3315\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3315\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3315\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3315","id":1061678452,"node_id":"PR_kwDODunzps4u7WpU","number":3315,"title":"Removing query params for dynamic URL caching","user":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["IMO it makes more sense to have `ignore_url_params` as an attribute of `DownloadConfig` to avoid defining a new argument in `DownloadManger`'s methods.","@mariosasko that would make sense to me too, but it seems like `DownloadConfig` wasn't intended to be modified from a dataset loading script. @lhoestq wdyt?","We can expose `DownloadConfig` as a property of `DownloadManager`, and then in the script before the download call we could do: `dl_manager.download_config.ignore_url_params = True`. But yes, let's hear what Quentin thinks.","Oh indeed that's a great idea. 
This parameter is similar to others like `download_config.use_etag` that defines the behavior of the download and caching, so it's better if we have it there, and expose the `download_config`","Implemented it via `dl_manager.download_config.ignore_url_params` now, and also added a usage example above :) "],"created_at":1637699052000,"updated_at":1637851472000,"closed_at":1637851471000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"The main use case for this is to make dynamically generated private URLs (like the ones returned by CommonVoice API) compatible with the datasets' caching logic.\r\n\r\nUsage example:\r\n```python\r\nimport datasets\r\n\r\nclass CommonVoice(datasets.GeneratorBasedBuilder):\r\n def _info(self):\r\n return datasets.DatasetInfo()\r\n\r\n def _split_generators(self, dl_manager):\r\n dl_manager.download_config.ignore_url_params = True\r\n HUGE_URL = \"https:\/\/mozilla-common-voice-datasets.s3.dualstack.us-west-2.amazonaws.com\/cv-corpus-7.0-2021-07-21\/cv-corpus-7.0-2021-07-21-ab.tar.gz?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=ASIAQ3GQRTO3IU5JYB5K%2F20211125%2Fus-west-2%2Fs3%2Faws4_request&X-Amz-Date=20211125T131423Z&X-Amz-Expires=43200&X-Amz-Security-Token=FwoGZXIvYXdzEL7%2F%2F%2F%2F%2F%2F%2F%2F%2F%2FwEaDLsZw7Nj0d9h4rgheyKSBJJ6bxo1JdWLXAUhLMrUB8AXfhP8Ge4F8dtjwXmvGJgkIvdMT7P4YOEE1pS3mW8AyKsz7Z7IRVCIGQrOH1AbxGVVcDoCMMswXEOqL3nJFihKLf99%2F6l8iJVZdzftRUNgMhX5Hz0xSIL%2BzRDpH5nYa7C6YpEdOdW81CFVXybx7WUrX13wc8X4ZlUj7zrWcWf5p2VEIU5Utb7YHVi0Y5TQQiZSDoedQl0j4VmMuFkDzoobIO%2BvilgGeE2kIX0E62X423mEGNu4uQV5JsOuLAtv3GVlemsqEH3ZYrXDuxLmnvGj5HfMtySwI4vKv%2BlnnirD29o7hxvtidXiA8JMWhp93aP%2Fw7sod%2BPPbb5EqP%2B4Qb2GJ1myClOKcLEY0cqoy7XWm8NeVljLJojnFJVS5mNFBAzCCTJ%2FidxNsj8fflzkRoAzYaaPBuOTL1dgtZCdslK3FAuEvw0cik7P9A7IYiULV33otSHKMPcVfNHFsWQljs03gDztsIUWxaXvu6ck5vCcGULsHbfe6xoMPm2bR9jtKLONsslPcnzWIf7%2Fch2w%2F%2BjtTCd9IxaH4kytyJ6mIjpV%2FA%2F2h9qeDnDFsCphnMjAzPQn6tqCgTtPcyJ2b8c94ncgUnE4mepx%2FDa%2FanAEsrg9RPdmbdoPswzHn1IClh91IfSN74u95DZUxlPeZrHG5HxVCN3dKO6j%2Ft1xd20L0hEtazDdKOr8%2FYwGMirp8rp%2BII0pYOwQOrYHqH%2FREX2dRJctJtwE86Qj1eU8BAdXuFIkLC4NWXw%3D&X-Amz-Signature=1b8108d29b0e9c2bf6c7246e58ca8d5749a83de0704757ad8e8a44d78194691f&X-Amz-SignedHeaders=host\"\r\n dl_path = dl_manager.download_and_extract(HUGE_URL)\r\n print(dl_path)\r\n \r\n HUGE_URL += \"&some_new_or_changed_param=12345\"\r\n dl_path = dl_manager.download_and_extract(HUGE_URL)\r\n print(dl_path)\r\n\r\ndl_manager = datasets.DownloadManager(dataset_name=\"common_voice\")\r\nCommonVoice()._split_generators(dl_manager)\r\n```\r\n\r\nOutput:\r\n```\r\n\/home\/user\/.cache\/huggingface\/datasets\/downloads\/6ef2a377398ff3309554be040caa78414e6562d623dbd0ce8fc262459a7f8ec6\r\n\/home\/user\/.cache\/huggingface\/datasets\/downloads\/6ef2a377398ff3309554be040caa78414e6562d623dbd0ce8fc262459a7f8ec6\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3315\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3315\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3315","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3315","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3315.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3315.patch","merged_at":1637851471000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3314","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3314\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3314\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3314\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3314","id":1061448227,"node_id":"PR_kwDODunzps4u6mdX","number":3314,"title":"Adding arg to pass process rank to `map`","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Some commits seem to be there twice (made the mistake of rebasing because I wasn't sure whether the doc had changed), is this an issue @lhoestq ?"],"created_at":1637682921000,"updated_at":1637754853000,"closed_at":1637754853000,"author_association":"MEMBER","active_lock_reason":null,"body":"This PR adds a `with_rank` argument to `map` that gives the user the possibility to pass the rank of each process to their function. This is mostly designed for multi-GPU map (each process can be sent to a different device thanks to the rank). I've also added tests. 
I'm putting the PR up so you can check the code; I'll add a multi-GPU example to the doc (+ write a bit in the doc for the new arg).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3314\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3314\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3314","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3314","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3314.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3314.patch","merged_at":1637754853000},"is_pull_request":true}
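A minimal sketch of the behavior this PR adds: with `with_rank=True`, the mapped function receives the process rank as an extra argument. The function name is illustrative, and the GPU placement in the comment is the intended use case described in the PR, not code from it:

```python
from datasets import Dataset

dataset = Dataset.from_dict({"x": list(range(8))})

def process(example, rank):
    # In the multi-GPU use case each worker would pick its device from the
    # rank, e.g. device = f"cuda:{rank}" (hypothetical placement).
    return {"x": example["x"], "rank": rank}

# num_proc=2 runs two worker processes, with ranks 0 and 1.
result = dataset.map(process, with_rank=True, num_proc=2)
print(result["rank"])
```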
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3313","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3313\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3313\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3313\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3313","id":1060933392,"node_id":"I_kwDODunzps4_PI8Q","number":3313,"title":"TriviaQA License Mismatch","user":{"login":"akhilkedia","id":16665267,"node_id":"MDQ6VXNlcjE2NjY1MjY3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16665267?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/akhilkedia","html_url":"https:\/\/github.com\/akhilkedia","followers_url":"https:\/\/api.github.com\/users\/akhilkedia\/followers","following_url":"https:\/\/api.github.com\/users\/akhilkedia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/akhilkedia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/akhilkedia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/akhilkedia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/akhilkedia\/orgs","repos_url":"https:\/\/api.github.com\/users\/akhilkedia\/repos","events_url":"https:\/\/api.github.com\/users\/akhilkedia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/akhilkedia\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! You're completely right, this must be mentioned in the dataset card.\r\nIf you're interesting in contributing, feel free to open a pull request to mention this in the `trivia_qa` dataset card in the \"Licensing Information\" section at https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/trivia_qa\/README.md"],"created_at":1637654415000,"updated_at":1638185061000,"closed_at":1638185061000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\n\r\nTriviaQA Webpage at http:\/\/nlp.cs.washington.edu\/triviaqa\/ says they do not own the copyright to the data. 
However, the Hugging Face datasets page at https:\/\/huggingface.co\/datasets\/trivia_qa mentions that the dataset is released under the Apache License.\r\n\r\nIs the license information on Hugging Face correct?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3313\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3313\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3312","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3312\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3312\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3312\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3312","id":1060440346,"node_id":"PR_kwDODunzps4u3duV","number":3312,"title":"add bl books genre dataset","user":{"login":"davanstrien","id":8995957,"node_id":"MDQ6VXNlcjg5OTU5NTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8995957?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/davanstrien","html_url":"https:\/\/github.com\/davanstrien","followers_url":"https:\/\/api.github.com\/users\/davanstrien\/followers","following_url":"https:\/\/api.github.com\/users\/davanstrien\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/davanstrien\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/davanstrien\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/davanstrien\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/davanstrien\/orgs","repos_url":"https:\/\/api.github.com\/users\/davanstrien\/repos","events_url":"https:\/\/api.github.com\/users\/davanstrien\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/davanstrien\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["To fix the CI, feel free to run the `make style` command to format the code.\r\n\r\nThen it also looks like the dummy_data.zip archives are all empty, which makes the tests fail. Can you try regenerating them ? They should have one file inside which is a dummy version of the file at https:\/\/bl.iro.bl.uk\/downloads\/36c7cd20-c8a7-4495-acbe-469b9132c6b1?locale=en","@lhoestq, thanks for that feedback. \r\n\r\nI should have made most of these changes now. The `--auto_generate` flag wasn't working because the file wasn't downloaded with a `.csv` extension. I used `--match_text_files \"*\"` to get around this. Because there is a lot of data that isn't annotated using the default line number for the dummy data causes the `annotated_raw` and the `title_genre_classifiction` configs to fail because they don't generate any examples \u2014 bumping the line numbers to `250` fixes this. This does make the dummy data a bit bigger, though. \r\n\r\nThe total directory size for the dataset is now `150kb`. 
Is this okay, or do you want me to generate the dummy data manually instead? ","Hi ! yes 150kB is fine :)\r\nFeel free to push your new dummy_data.zip files (I think the current ones are still the empty ones)","@lhoestq I've pushed those dummy files now and added your other suggestions.","The CI failure is unrelated to this PR, merging :)","@lhoestq, thanks for all your help with this pull request \ud83d\ude00"],"created_at":1637603690000,"updated_at":1638461429000,"closed_at":1638461267000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"First of all, thanks for the fantastic library\/collection of datasets \ud83e\udd17\r\n\r\nThis pull request adds a dataset of metadata from digitised (mostly 19th Century) books from the British Library. The [data](https:\/\/bl.iro.bl.uk\/concern\/datasets\/1e1ccb46-65b4-4481-b6f8-b8129d5da053) contains various metadata about the books. In addition, a subset of the data includes 'genre' information which can be used for supervised text classification tasks. I hope that this offers easier access to a dataset for doing text classification on GLAM (galleries, libraries, archives and museums) data. \r\n\r\nI have tried to create three configurations that provide both an 'easy' version of the dataset, for training a genre classification model, and a more 'raw' version of the data for other potential use cases. I am open to suggestions if this doesn't make sense. \r\n\r\nSimilarly, for some of the arrow datatypes, I have had to fall back to strings since there are missing values for some fields\/rows, but I may have missed a more elegant way of dealing with it. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3312\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3312\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3312","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3312","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3312.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3312.patch","merged_at":1638461267000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3311","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3311\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3311\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3311\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3311","id":1060387957,"node_id":"I_kwDODunzps4_NDx1","number":3311,"title":"Add WebSRC","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637600313000,"updated_at":1637600313000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Adding a Dataset\r\n- **Name:** WebSRC\r\n- **Description:** WebSRC is a novel Web-based Structural Reading Comprehension dataset. It consists of 0.44M question-answer pairs, which are collected from 6.5K web pages with corresponding HTML source code, screenshots and metadata. 
\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2101.09465\r\n- **Data:** https:\/\/x-lance.github.io\/WebSRC\/dashboard.html#\r\n- **Motivation:** Currently adding MarkupLM, which achieves SOTA on this dataset, to HuggingFace Transformers.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3311\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3311\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3310","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3310\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3310\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3310\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3310","id":1060098104,"node_id":"I_kwDODunzps4_L9A4","number":3310,"title":"Fatal error condition occurred in aws-c-io","user":{"login":"Crabzmatic","id":31850219,"node_id":"MDQ6VXNlcjMxODUwMjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31850219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Crabzmatic","html_url":"https:\/\/github.com\/Crabzmatic","followers_url":"https:\/\/api.github.com\/users\/Crabzmatic\/followers","following_url":"https:\/\/api.github.com\/users\/Crabzmatic\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Crabzmatic\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Crabzmatic\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Crabzmatic\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Crabzmatic\/orgs","repos_url":"https:\/\/api.github.com\/users\/Crabzmatic\/repos","events_url":"https:\/\/api.github.com\/users\/Crabzmatic\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Crabzmatic\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! Are you having this issue only with this specific dataset, or it also happens with other ones like `squad` ?","@lhoestq It happens also on `squad`. It successfully downloads the whole dataset and then crashes on: \r\n\r\n```\r\nFatal error condition occurred in D:\\bld\\aws-c-io_1633633258269\\work\\source\\event_loop.c:74: aws_thread_launch(&cleanup_thread, s_event_loop_destroy_async_thread_fn, el_group, &thread_options) == AWS_OP_SUCCESS\r\nExiting Application\r\n```\r\n\r\nI tested it on Ubuntu and its working OK. 
Didn't test on non-preview version of Windows 11, `Windows-10-10.0.22504-SP0` is a preview version, not sure if this is causing it.","I see the same error in Windows-10.0.19042 as of a few days ago:\r\n\r\n`Fatal error condition occurred in D:\\bld\\aws-c-io_1633633258269\\work\\source\\event_loop.c:74: aws_thread_launch(&cleanup_thread, s_event_loop_destroy_async_thread_fn, el_group, &thread_options) == AWS_OP_SUCCESS`\r\n\r\npython 3.8.12 h7840368_2_cpython conda-forge\r\nboto3 1.20.11 pyhd8ed1ab_0 conda-forge\r\nbotocore 1.23.11 pyhd8ed1ab_0 conda-forge\r\n\r\n...but I am not using `datasets` (although I might take a look now that I know about it!)\r\n\r\nThe error has occurred a few times over the last two days, but not consistently enough for me to get it with DEBUG. If there is any interest I can report back here, but it seems not unique to `datasets`.","I'm not sure what `datasets` has to do with a crash that seems related to `aws-c-io`, could it be an issue with your environment ?","> I'm not sure what `datasets` has to do with a crash that seems related to `aws-c-io`, could it be an issue with your environment ?\r\n\r\nAgreed, this issue is not likely a bug in datasets, since I get the identical error without datasets installed.","Will close this issue. Bug in `aws-c-io` shouldn't be in `datasets` repo. Nevertheless, it can be useful to know that it happens. Thanks @leehaust @lhoestq ","I have also had this issue since a few days, when running scripts using PyCharm in particular, but it does not seem to affect the script from running, only reporting this error at the end of the run.","I also get this issue, It appears after my script has finished running. I get the following error message\r\n```\r\nFatal error condition occurred in \/home\/conda\/feedstock_root\/build_artifacts\/aws-c-io_1637179816120\/work\/source\/event_loop.c:72: aws_thread_launch(&cleanup_thread, s_event_loop_destroy_async_thread_fn, el_group, &thread_options) == AWS_OP_SUCCESS\r\nExiting Application\r\n################################################################################\r\nStack trace:\r\n################################################################################\r\n\/home\/user_name\/conda_envs\/env_name\/lib\/python3.7\/site-packages\/pyarrow\/..\/..\/..\/.\/.\/libaws-c-common.so.1(aws_backtrace_print+0x59) [0x2aabe0479579]\r\n\/home\/user_name\/conda_envs\/env_name\/lib\/python3.7\/site-packages\/pyarrow\/..\/..\/..\/.\/.\/libaws-c-common.so.1(aws_fatal_assert+0x48) [0x2aabe04696c8]\r\n\/home\/user_name\/conda_envs\/env_name\/lib\/python3.7\/site-packages\/pyarrow\/..\/..\/..\/.\/.\/.\/libaws-c-io.so.1.0.0(+0x13ad3) [0x2aabe0624ad3]\r\n\/home\/user_name\/conda_envs\/env_name\/lib\/python3.7\/site-packages\/pyarrow\/..\/..\/..\/.\/.\/libaws-c-common.so.1(aws_ref_count_release+0x1d) [0x2aabe047b60d]\r\n\/home\/user_name\/conda_envs\/env_name\/lib\/python3.7\/site-packages\/pyarrow\/..\/..\/..\/.\/.\/.\/libaws-c-io.so.1.0.0(+0x113ca) [0x2aabe06223ca]\r\n\/home\/user_name\/conda_envs\/env_name\/lib\/python3.7\/site-packages\/pyarrow\/..\/..\/..\/.\/.\/libaws-c-common.so.1(aws_ref_count_release+0x1d) [0x2aabe047b60d]\r\n\/home\/user_name\/conda_envs\/env_name\/lib\/python3.7\/site-packages\/pyarrow\/..\/..\/..\/.\/.\/libaws-crt-cpp.so(_ZN3Aws3Crt2Io15ClientBootstrapD1Ev+0x3a) [0x2aabe041cf5a]\r\n\/home\/user_name\/conda_envs\/env_name\/lib\/python3.7\/site-packages\/pyarrow\/..\/..\/..\/.\/libaws-cpp-sdk-core.so(+0x5f570) [0x2aabe00eb570]\r\n\/lib64\/libc.so.6(+0x39ce9) 
[0x2aaaab835ce9]\r\n\/lib64\/libc.so.6(+0x39d37) [0x2aaaab835d37]\r\n\/lib64\/libc.so.6(__libc_start_main+0xfc) [0x2aaaab81e55c]\r\npython(+0x1c721d) [0x55555571b21d]\r\nAborted\r\n```\r\nI don't get this issue when running my code in a container, and it seems more relevant to PyArrow but thought a more complete stack trace might be helpful to someone\r\n","I created an issue on JIRA:\r\nhttps:\/\/issues.apache.org\/jira\/browse\/ARROW-15141","@CallumMcMahon Do you have a small reproducer for this problem on Linux? I can reproduce this on Windows but sadly not with linux."],"created_at":1637584074000,"updated_at":1639733245000,"closed_at":1638224557000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nFatal error when using the library\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('wikiann', 'en')\r\n```\r\n\r\n## Expected results\r\nNo fatal errors\r\n\r\n## Actual results\r\n```\r\nFatal error condition occurred in D:\\bld\\aws-c-io_1633633258269\\work\\source\\event_loop.c:74: aws_thread_launch(&cleanup_thread, s_event_loop_destroy_async_thread_fn, el_group, &thread_options) == AWS_OP_SUCCESS\r\nExiting Application\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.15.2.dev0\r\n- Platform: Windows-10-10.0.22504-SP0\r\n- Python version: 3.8.12\r\n- PyArrow version: 6.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3310\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3310\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3309","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3309\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3309\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3309\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3309","id":1059496154,"node_id":"PR_kwDODunzps4u0Xgm","number":3309,"title":"fix: files counted twice in inferred structure","user":{"login":"borisdayma","id":715491,"node_id":"MDQ6VXNlcjcxNTQ5MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/715491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borisdayma","html_url":"https:\/\/github.com\/borisdayma","followers_url":"https:\/\/api.github.com\/users\/borisdayma\/followers","following_url":"https:\/\/api.github.com\/users\/borisdayma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borisdayma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borisdayma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borisdayma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borisdayma\/orgs","repos_url":"https:\/\/api.github.com\/users\/borisdayma\/repos","events_url":"https:\/\/api.github.com\/users\/borisdayma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borisdayma\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["I see it creates some errors in the tests.\r\n\r\nAnother solution if needed is to add something like `data_files = list(set(data_files))` after [this line](https:\/\/github.com\/huggingface\/datasets\/blob\/8555197a3fe826e98bd0206c2d031c4488c53c5c\/src\/datasets\/data_files.py#L511)","Hi ! Thanks for the correction :)\r\n\r\nYour change seems right, let me look at the errors and try to fix this","Not sure if it's due to this change but I\u00a0tested `load_dataset('dalle-mini\/encoded-vqgan_imagenet_f16_16384', streaming=True)` and the `validation` set is empty.","So indeed there was an issue with the patterns `*` and `**\/*` that would return some files twice. This issue came from the fact that we were not using the right `glob`.\r\n\r\nIndeed we were using `Path.rglob` for local files and `Path.match` for remote files. 
Since these two methods don't have the same behavior for such patterns, I decided to change that.\r\n\r\nIn particular, we now use `glob.glob` (same as `fsspec` glob) as a reference for data files resolution from patterns. This is the same as dask for example.\r\n\r\n\/!\\ Here are some behaviors specific to `glob.glob` that are different from Path.glob, Path.match or fnmatch:\r\n- '*' matches only first level files\r\n- '**\/*' matches only at least second level files\r\n\r\nThis way we have a consistent behavior with respect to other python data libraries and there's no overlap anymore between the two patterns.\r\n\r\nSome implementations details:\r\n\r\nTo ensure that we have the same behavior for local files and for files in a remote dataset repository, I decided to use `fsspec` glob for both. This was made possible by implementing the `HfFileSystem` class as a `fsspec` filesystem.\r\n\r\nI pushed those changes directly to your PR - I hope you don't mind. I'm still fixing the remaining tests.\r\nPlease let me know if that solves your problem, and then we can merge !","There's still an issue with fsspec's glob - I'll take a look this afternoon","I just found out that actually glob.glob and fsspec glob are different haha\r\nglob.glob needs `**\/*` and recursive=True to look into deep subdirectories, while fsspec only requires `**`\r\n\r\nI think we can go with fsspec glob for consistency with dask and since it's our main tool for filesystems management","To recap:\r\n```\r\nWe use fsspec glob as a reference for data files resolution from patterns.\r\nThis is the same as dask for example.\r\n\r\n\/!\\ Here are some behaviors specific to fsspec glob that are different from glob.glob, Path.glob, Path.match or fnmatch:\r\n- '*' matches only first level items\r\n- '**' matches all items\r\n- '**\/*' matches all at least second level items\r\n\r\nMore generally:\r\n- `*`` matches any character except a forward-slash (to match just the file or directory name)\r\n- `**`` matches any character including a forward-slash \/\r\n```","lol Windows\u2026 Maybe `Pathlib` for the tests?\r\n\r\nI tested streaming a repo and it worked perfectly now!"],"created_at":1637531438000,"updated_at":1637686858000,"closed_at":1637686858000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Files were counted twice in a structure like:\r\n```\r\nmy_dataset_local_path\/\r\n\u251c\u2500\u2500 README.md\r\n\u2514\u2500\u2500 data\/\r\n \u251c\u2500\u2500 train\/\r\n \u2502 \u251c\u2500\u2500 shard_0.csv\r\n \u2502 \u251c\u2500\u2500 shard_1.csv\r\n \u2502 \u251c\u2500\u2500 shard_2.csv\r\n \u2502 \u2514\u2500\u2500 shard_3.csv\r\n \u2514\u2500\u2500 valid\/\r\n \u251c\u2500\u2500 shard_0.csv\r\n \u2514\u2500\u2500 shard_1.csv\r\n```\r\n\r\nThe reason is that they were matching both `*train*\/*` and `*train*\/**\/*`.\r\n\r\nThis PR fixes it. 
@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3309\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3309\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3309","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3309","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3309.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3309.patch","merged_at":1637686858000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3308","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3308\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3308\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3308\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3308","id":1059255705,"node_id":"I_kwDODunzps4_IvWZ","number":3308,"title":"\"dataset_infos.json\" missing for chr_en and mc4","user":{"login":"amitness","id":8587189,"node_id":"MDQ6VXNlcjg1ODcxODk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8587189?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/amitness","html_url":"https:\/\/github.com\/amitness","followers_url":"https:\/\/api.github.com\/users\/amitness\/followers","following_url":"https:\/\/api.github.com\/users\/amitness\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/amitness\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/amitness\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/amitness\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/amitness\/orgs","repos_url":"https:\/\/api.github.com\/users\/amitness\/repos","events_url":"https:\/\/api.github.com\/users\/amitness\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/amitness\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! Thanks for reporting :) \r\nWe can easily add the metadata for `chr_en` IMO, but for mC4 it will take more time, since it requires to count the number of examples in each language","No problem. I am trying to do some analysis on the metadata of all available datasets. Is reading `metadata_infos.json` for each dataset the correct way to go? \r\n\r\nI noticed that the same information is also available as special variables inside .py file of each dataset. 
So, I was wondering if `metadata_infos.json` has been deprecated?\r\n\r\n\r\n","The `dataset_infos.json` files have more information and are made to be used to analyze the datasets without having to run\/parse the python scripts. Moreover, some datasets on the Hugging Face Hub don't even have a python script, and for those we'll make tools to generate the JSON file automatically :)"],"created_at":1637453242000,"updated_at":1642600532000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nIn the repository, every dataset has its metadata in a file called `dataset_infos.json`. But this file is missing for two datasets: `chr_en` and `mc4`.\r\n\r\n## Steps to reproduce the bug\r\nCheck [chr_en](https:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/chr_en) and [mc4](https:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/mc4)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3308\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3308\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
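For the kind of metadata analysis described in this issue, reading `dataset_infos.json` directly is straightforward; a sketch with an illustrative path inside a checkout of the repository (and, as reported, no such file exists for `chr_en` or `mc4`):

```python
import json

# Illustrative path; each dataset script normally ships this file next to it.
with open("datasets/squad/dataset_infos.json") as f:
    infos = json.load(f)

# One entry per config; each carries features, split sizes, checksums, etc.
for config_name, info in infos.items():
    print(config_name, list(info.get("splits", {})))
```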
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3307","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3307\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3307\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3307\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3307","id":1059226297,"node_id":"PR_kwDODunzps4uzlWa","number":3307,"title":"Add IndoNLI dataset","user":{"login":"afaji","id":6201626,"node_id":"MDQ6VXNlcjYyMDE2MjY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6201626?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/afaji","html_url":"https:\/\/github.com\/afaji","followers_url":"https:\/\/api.github.com\/users\/afaji\/followers","following_url":"https:\/\/api.github.com\/users\/afaji\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/afaji\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/afaji\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/afaji\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/afaji\/orgs","repos_url":"https:\/\/api.github.com\/users\/afaji\/repos","events_url":"https:\/\/api.github.com\/users\/afaji\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/afaji\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["@lhoestq thanks for the review! I've modified the labels to follow other NLI datasets.\r\nPlease review my change and let me know if I miss anything."],"created_at":1637441163000,"updated_at":1637851908000,"closed_at":1637851908000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"This PR adds IndoNLI dataset, from https:\/\/aclanthology.org\/2021.emnlp-main.821\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3307\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3307\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3307","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3307","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3307.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3307.patch","merged_at":1637851908000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3306","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3306\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3306\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3306\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3306","id":1059185860,"node_id":"I_kwDODunzps4_IeTE","number":3306,"title":"nested sequence feature won't encode example if the first item of the outside sequence is an empty list","user":{"login":"function2-llx","id":38486514,"node_id":"MDQ6VXNlcjM4NDg2NTE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38486514?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/function2-llx","html_url":"https:\/\/github.com\/function2-llx","followers_url":"https:\/\/api.github.com\/users\/function2-llx\/followers","following_url":"https:\/\/api.github.com\/users\/function2-llx\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/function2-llx\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/function2-llx\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/function2-llx\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/function2-llx\/orgs","repos_url":"https:\/\/api.github.com\/users\/function2-llx\/repos","events_url":"https:\/\/api.github.com\/users\/function2-llx\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/function2-llx\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["knock knock","Hi, thanks for reporting! 
I've linked a PR that should fix the issue.","I've checked the PR and it looks great, thanks a lot!"],"created_at":1637427474000,"updated_at":1638968535000,"closed_at":1638968535000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nAs the title, nested sequence feature won't encode example if the first item of the outside sequence is an empty list.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import Features, Sequence, ClassLabel\r\nfeatures = Features({\r\n 'x': Sequence(Sequence(ClassLabel(names=['a', 'b']))),\r\n})\r\nprint(features.encode_batch({\r\n 'x': [\r\n [['a'], ['b']],\r\n [[], ['b']],\r\n ]\r\n}))\r\n```\r\n\r\n## Expected results\r\nprint `{'x': [[[0], [1]], [[], ['1']]]}`\r\n\r\n## Actual results\r\nprint `{'x': [[[0], [1]], [[], ['b']]]}`\r\n\r\n## Environment info\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-5.13.0-21-generic-x86_64-with-glibc2.34\r\n- Python version: 3.9.7\r\n- PyArrow version: 6.0.0\r\n\r\n## Additional information\r\nI think the issue stems from [here](https:\/\/github.com\/huggingface\/datasets\/blob\/8555197a3fe826e98bd0206c2d031c4488c53c5c\/src\/datasets\/features\/features.py#L847-L848).\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3306\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3306\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3305","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3305\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3305\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3305\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3305","id":1059161000,"node_id":"PR_kwDODunzps4uzZWv","number":3305,"title":"asserts replaced with exception for ``fingerprint.py``, ``search.py``, ``arrow_writer.py`` and ``metric.py``","user":{"login":"Ishan-Kumar2","id":46553104,"node_id":"MDQ6VXNlcjQ2NTUzMTA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46553104?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Ishan-Kumar2","html_url":"https:\/\/github.com\/Ishan-Kumar2","followers_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/followers","following_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/orgs","repos_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/repos","events_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637419883000,"updated_at":1637605472000,"closed_at":1637600893000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Addresses #3171 \r\nFixes exception for ``fingerprint.py``, ``search.py``, ``arrow_writer.py`` and ``metric.py`` and modified 
tests","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3305\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3305\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3305","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3305","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3305.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3305.patch","merged_at":1637600893000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3304","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3304\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3304\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3304\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3304","id":1059130494,"node_id":"I_kwDODunzps4_IQx-","number":3304,"title":"Dataset object has no attribute `to_tf_dataset`","user":{"login":"RajkumarGalaxy","id":59993678,"node_id":"MDQ6VXNlcjU5OTkzNjc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59993678?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/RajkumarGalaxy","html_url":"https:\/\/github.com\/RajkumarGalaxy","followers_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/followers","following_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/orgs","repos_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/repos","events_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["The issue is due to the older version of transformers and datasets. It has been resolved by upgrading their versions.\r\n\r\n```\r\n# upgrade transformers and datasets to latest versions\r\n!pip install --upgrade transformers\r\n!pip install --upgrade datasets\r\n```\r\n\r\nRegards!"],"created_at":1637409839000,"updated_at":1637478445000,"closed_at":1637478445000,"author_association":"NONE","active_lock_reason":null,"body":"I am following HuggingFace Course. I am at Fine-tuning a model. 
\r\nLink: https:\/\/huggingface.co\/course\/chapter3\/2?fw=tf\r\n\r\nI use tokenize_function and `map` as mentioned in the course to process data.\r\n\r\n`# define a tokenize function`\r\n`def Tokenize_function(example):`\r\n` return tokenizer(example['sentence'], truncation=True)`\r\n\r\n`# tokenize entire data`\r\n`tokenized_data = raw_data.map(Tokenize_function, batched=True)`\r\n\r\nI get Dataset object at this point. When I try converting this to a TF dataset object as mentioned in the course, it throws the following error.\r\n\r\n`# convert to TF dataset`\r\n`train_data = tokenized_data[\"train\"].to_tf_dataset( `\r\n` columns = ['attention_mask', 'input_ids', 'token_type_ids'], `\r\n` label_cols = ['label'], `\r\n` shuffle = True, `\r\n` collate_fn = data_collator, `\r\n` batch_size = 8 `\r\n`)`\r\n\r\nOutput:\r\n\r\n`---------------------------------------------------------------------------`\r\n`AttributeError Traceback (most recent call last)`\r\n`\/tmp\/ipykernel_42\/103099799.py in `\r\n` 1 # convert to TF dataset`\r\n`----> 2 train_data = tokenized_data[\"train\"].to_tf_dataset( \\`\r\n` 3 columns = ['attention_mask', 'input_ids', 'token_type_ids'], \\`\r\n` 4 label_cols = ['label'], \\`\r\n` 5 shuffle = True, \\`\r\n`AttributeError: 'Dataset' object has no attribute 'to_tf_dataset'`\r\n\r\nWhen I look for `dir(tokenized_data[\"train\"])`, there is no method or attribute in the name of `to_tf_dataset`.\r\n\r\nWhy do I get this error? And how to clear this?\r\n\r\nPlease help me.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3304\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3304\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3303","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3303\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3303\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3303\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3303","id":1059129732,"node_id":"I_kwDODunzps4_IQmE","number":3303,"title":"DataCollatorWithPadding: TypeError","user":{"login":"RajkumarGalaxy","id":59993678,"node_id":"MDQ6VXNlcjU5OTkzNjc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59993678?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/RajkumarGalaxy","html_url":"https:\/\/github.com\/RajkumarGalaxy","followers_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/followers","following_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/orgs","repos_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/repos","events_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["\r\n> \r\n> Input:\r\n> \r\n> ```\r\n> tokenizer = AutoTokenizer.from_pretrained(checkpoint)\r\n> data_collator = DataCollatorWithPadding(tokenizer=tokenizer, return_tensors=\"tf\")\r\n> ```\r\n> \r\n> Output:\r\n> \r\n> ```\r\n> TypeError Traceback (most recent call last)\r\n> \/tmp\/ipykernel_42\/1563280798.py in \r\n> 1 checkpoint = 'bert-base-uncased'\r\n> 2 tokenizer = AutoTokenizer.from_pretrained(checkpoint)\r\n> ----> 3 data_collator = DataCollatorWithPadding(tokenizer=tokenizer, return_tensors=\"pt\")\r\n> TypeError: __init__() got an unexpected keyword argument 'return_tensors'\r\n> ```\r\n> \r\n\r\nThe issue is due to the older version of transformers and datasets. 
It has been resolved by upgrading their versions.\r\n\r\n`# upgrade transformers and datasets to latest versions`\r\n`!pip install --upgrade transformers`\r\n`!pip install --upgrade datasets`\r\n\r\nCheers!"],"created_at":1637409595000,"updated_at":1637478337000,"closed_at":1637478337000,"author_association":"NONE","active_lock_reason":null,"body":"Hi,\r\nI am following the HuggingFace course. I am now at Fine-tuning [https:\/\/huggingface.co\/course\/chapter3\/3?fw=tf](https:\/\/huggingface.co\/course\/chapter3\/3?fw=tf). When I set up `DataCollatorWithPadding` as following I got an error while trying to reproduce the course code in Kaggle. This error occurs with either a CPU-only-device or a GPU-device.\r\n\r\nInput:\r\n```checkpoint = 'bert-base-uncased'\r\ntokenizer = AutoTokenizer.from_pretrained(checkpoint)\r\ndata_collator = DataCollatorWithPadding(tokenizer=tokenizer, return_tensors=\"tf\")\r\n```\r\n\r\nOutput:\r\n```---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n\/tmp\/ipykernel_42\/1563280798.py in \r\n 1 checkpoint = 'bert-base-uncased'\r\n 2 tokenizer = AutoTokenizer.from_pretrained(checkpoint)\r\n----> 3 data_collator = DataCollatorWithPadding(tokenizer=tokenizer, return_tensors=\"pt\")\r\nTypeError: __init__() got an unexpected keyword argument 'return_tensors'\r\n```\r\n\r\nWhen I call `help` method, it too confirms that there is no argument `return_tensors`.\r\nInput:\r\n```\r\nhelp(DataCollatorWithPadding.__init__)\r\n```\r\nOutput:\r\n```\r\nHelp on function __init__ in module transformers.data.data_collator:\r\n\r\n__init__(self, tokenizer: transformers.tokenization_utils_base.PreTrainedTokenizerBase, padding: Union[bool, str, transformers.file_utils.PaddingStrategy] = True, max_length: Union[int, NoneType] = None, pad_to_multiple_of: Union[int, NoneType] = None) -> None\r\n```\r\n\r\nBut, the source file *[Data Collator - docs](https:\/\/huggingface.co\/transformers\/main_classes\/data_collator.html#datacollatorwithpadding)* says that there is such an argument. By default, it returns Pytorch tensors while I need TF tensors.\r\n\r\nWhere do I miss?\r\nPlease help me.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3303\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3303\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3302","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3302\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3302\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3302\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3302","id":1058907168,"node_id":"PR_kwDODunzps4uynjc","number":3302,"title":"fix old_val typo in f-string","user":{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637355068000,"updated_at":1637878483000,"closed_at":1637600659000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"\r\nThis PR is to correct a typo in #3277 that @Carlosbogo revieled in a comment.\r\n\r\nRelated closed issue : #3257 \r\n\r\nSorry about that \ud83d\ude05.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3302\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3302\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3302","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3302","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3302.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3302.patch","merged_at":1637600659000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3301","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3301\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3301\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3301\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3301","id":1058718957,"node_id":"PR_kwDODunzps4uyA9o","number":3301,"title":"Add wikipedia tags","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637339965000,"updated_at":1637340570000,"closed_at":1637340569000,"author_association":"MEMBER","active_lock_reason":null,"body":"Add the missing tags to the wikipedia dataset card.\r\n\r\nI also added the missing languages code in our language codes list.\r\n\r\nThis should also fix the code snippet that is presented on the Hub to load the dataset: fix 
https:\/\/github.com\/huggingface\/datasets\/issues\/3292","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3301\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3301\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3301","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3301","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3301.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3301.patch","merged_at":1637340569000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3300","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3300\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3300\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3300\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3300","id":1058644459,"node_id":"I_kwDODunzps4_GaHr","number":3300,"title":"\u2753 Dataset loading script from Hugging Face Hub","user":{"login":"pietrolesci","id":61748653,"node_id":"MDQ6VXNlcjYxNzQ4NjUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/61748653?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pietrolesci","html_url":"https:\/\/github.com\/pietrolesci","followers_url":"https:\/\/api.github.com\/users\/pietrolesci\/followers","following_url":"https:\/\/api.github.com\/users\/pietrolesci\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pietrolesci\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pietrolesci\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pietrolesci\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pietrolesci\/orgs","repos_url":"https:\/\/api.github.com\/users\/pietrolesci\/repos","events_url":"https:\/\/api.github.com\/users\/pietrolesci\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pietrolesci\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on 
huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"assignees":[{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! In the next version of `datasets`, your train and test splits will be correctly separated (changes from #3027) if you create a dataset repository with only your CSV files.\r\n\r\nAlso it seems that you overwrite the `data_files` and `data_dir` arguments in your code, when you instantiate the AGNewsConfig objects. 
Those parameters are not necessary since you already know which files you want to load.\r\n\r\nYou can find an example on how to specify which file the dataset has to download in this [example script](https:\/\/huggingface.co\/datasets\/lhoestq\/custom_squad\/blob\/main\/custom_squad.py#L101-L107):\r\n```python\r\n_URLS = {\r\n \"train\": \"train-v1.1.json\", # you can use a URL or a relative path from the python script to your file in the repository\r\n \"dev\": \"dev-v1.1.json\",\r\n}\r\n```\r\n```python\r\n def _split_generators(self, dl_manager):\r\n downloaded_files = dl_manager.download_and_extract(_URLS)\r\n\r\n return [\r\n datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={\"filepath\": downloaded_files[\"train\"]}),\r\n datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={\"filepath\": downloaded_files[\"dev\"]}),\r\n ]\r\n```","Also I think the viewer will be updated when you fix the dataset script, let me know if it doesn't","Hi @lhoestq,\r\n\r\nThanks a lot for the super quick answer!\r\n\r\nYour suggestion solves my issue. I am now able to load the dataset properly \ud83d\ude80 \r\nHowever, the dataviewer is not working yet.\r\n\r\nReally, thanks a lot for your help and consideration!\r\n\r\nBest,\r\nPietro","Great ! We'll take a look at the viewer to fix it","@lhoestq I think I am having a related problem.\r\nMy call to load_dataset() looks like this:\r\n\r\n```\r\n datasets = load_dataset(\r\n os.path.abspath(layoutlmft.data.datasets.xfun.__file__),\r\n f\"xfun.{data_args.lang}\",\r\n additional_langs=data_args.additional_langs,\r\n keep_in_memory=True,\r\n )\r\n\r\n```\r\n\r\nMy _split_generation code is:\r\n\r\n```\r\n def _split_generators(self, dl_manager):\r\n \"\"\"Returns SplitGenerators.\"\"\"\r\n\r\n downloaded_file = dl_manager.download_and_extract(\"https:\/\/guillaumejaume.github.io\/FUNSD\/dataset.zip\")\r\n return [\r\n datasets.SplitGenerator(\r\n name=datasets.Split.TRAIN, gen_kwargs={\"filepath\": f\"{downloaded_file}\/dataset\/training_data\/\"}\r\n ),\r\n datasets.SplitGenerator(\r\n name=datasets.Split.TEST, gen_kwargs={\"filepath\": f\"{downloaded_file}\/dataset\/testing_data\/\"}\r\n ),\r\n ]\r\n\r\n```\r\nHowever I get the error \"TypeError: _generate_examples() got an unexpected keyword argument 'filepath'\"\r\nThe path looks right and I see the data in the path so I think the only problem I have is that it doesn't like the key \"filepath\". However, the documentation (example [here](https:\/\/huggingface.co\/datasets\/lhoestq\/custom_squad\/blob\/main\/custom_squad.py#L101-L107)) seems to show that this is the correct parameter. 
\r\n\r\nHere is the full stack trace:\r\n\r\n```\r\nDownloading and preparing dataset xfun\/xfun.en (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/Users\/caseygre\/.cache\/huggingface\/datasets\/xfun\/xfun.en\/0.0.0\/96b8cb7c57f6f822f0ab37ae3be7b82d84ac57062e774c9361ccf0a4b9ef61cc...\r\nTraceback (most recent call last):\r\n File \"\/Users\/caseygre\/PycharmProjects\/aegis-ml-new\/unilm\/venv-LayoutLM\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 574, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/Users\/caseygre\/PycharmProjects\/aegis-ml-new\/unilm\/venv-LayoutLM\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 652, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/Users\/caseygre\/PycharmProjects\/aegis-ml-new\/unilm\/venv-LayoutLM\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 975, in _prepare_split\r\n generator = self._generate_examples(**split_generator.gen_kwargs)\r\nTypeError: _generate_examples() got an unexpected keyword argument 'filepath'\r\npython-BaseException\r\n```","Hi ! The `gen_kwargs` dictionary is passed to `_generate_examples`, so in your case it must be defined this way:\r\n```python\r\ndef _generate_examples(self, filepath):\r\n ...\r\n```\r\n\r\nAnd here is an additional tip: you can use `os.path.join(downloaded_file, \"dataset\/testing_data\")` instead of `f\"downloaded_file}\/dataset\/testing_data\/\"` to get compatibility with Windows and streaming.\r\n\r\nIndeed Windows uses a backslash separator, not a slash, and streaming uses chained URLs (like `zip:\/\/dataset\/testing_data::https:\/\/https:\/\/guillaumejaume.github.io\/FUNSD\/dataset.zip` for example)","Thanks for you quick reply @lhoestq and so sorry for my very delayed response.\r\nWe have gotten around the error another way but I will try to duplicate this when I can. We may have had \"filepaths\" instead of \"filepath\" in our def of _generate_examples() and not noticed the difference. If I find a more useful answer for others I will add to this ticket so they know what the issue was.\r\nNote: we do have our own _generate_examples() defined with the same def as Quentin has. (But one version does have \"filepaths\".)\r\n","Fixed in the viewer: https:\/\/huggingface.co\/datasets\/pietrolesci\/ag_news"],"created_at":1637335252000,"updated_at":1640170676000,"closed_at":1640170676000,"author_association":"NONE","active_lock_reason":null,"body":"Hi there,\r\n\r\nI am trying to add my custom `ag_news` with its own loading script on the Hugging Face datasets hub. In particular, I would like to test the addition of a second configuration to the existing `ag_news` dataset. Once it works in my hub, I plan to make a PR to the original dataset. However, in trying to do so I have encountered certain problems as detailed below.\r\n\r\nIssues I have encountered:\r\n- Without a loading script, the train and test files are loaded together into a unique `dataset.Dataset` -> so I wrote a loading script. Also, I need a loading script otherwise I cannot specify multiple configurations\r\n- Once my loading script is working locally, I do not manage to make it work on the hub. In particular, I would like to be able to load the dataset like this\r\n```python\r\nload_dataset(\"pietrolesci\/ag_news\", name=\"my_configuration\")\r\n```\r\n\r\nApparently, the `load_dataset` is able to pick up the loading script from the hub and run it. 
However, it errors because it is unable to find the files. The structure of my hub repo is the following\r\n```\r\nag_news.py\r\ntrain.csv\r\ntest.csv\r\n```\r\nand the loading script I specify `data_dir=Path(__file__).parent` and `data_files=DataFilesDict({\"train\": \"train.csv\", \"test\": \"test.csv\"})`. In the documentation I could not find info regarding loading a dataset from the hub using a loading script present on the hub.\r\n\r\nAny suggestion is very much appreciated.\r\n\r\nBest,\r\nPietro\r\n\r\nLink to the hub repo: https:\/\/huggingface.co\/datasets\/pietrolesci\/ag_news\r\n\r\nBONUS: how can I make the data viewer work in this specific case? :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3300\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3300\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3299","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3299\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3299\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3299\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3299","id":1058518213,"node_id":"I_kwDODunzps4_F7TF","number":3299,"title":"Add option to find unique elements in nested sequences when calling `Dataset.unique`","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637327766000,"updated_at":1637327766000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"It would be nice to have an option to flatten nested sequences to find unique elements stored in them when calling `Dataset.unique`. 
Currently, `Dataset.unique` only supports finding unique sequences and not unique elements in that situation.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3299\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3299\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3298","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3298\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3298\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3298\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3298","id":1058420201,"node_id":"I_kwDODunzps4_FjXp","number":3298,"title":"Agnews dataset viewer is not working","user":{"login":"pietrolesci","id":61748653,"node_id":"MDQ6VXNlcjYxNzQ4NjUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/61748653?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pietrolesci","html_url":"https:\/\/github.com\/pietrolesci","followers_url":"https:\/\/api.github.com\/users\/pietrolesci\/followers","following_url":"https:\/\/api.github.com\/users\/pietrolesci\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pietrolesci\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pietrolesci\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pietrolesci\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pietrolesci\/orgs","repos_url":"https:\/\/api.github.com\/users\/pietrolesci\/repos","events_url":"https:\/\/api.github.com\/users\/pietrolesci\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pietrolesci\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! Thanks for reporting\r\nWe've already fixed the code that generates the preview for this dataset, we'll release the fix soon :)","Hi @lhoestq, thanks for your feedback!","Fixed in the viewer.\r\n\r\nhttps:\/\/huggingface.co\/datasets\/ag_news"],"created_at":1637320739000,"updated_at":1640103845000,"closed_at":1640103845000,"author_association":"NONE","active_lock_reason":null,"body":"## Dataset viewer issue for '*name of the dataset*'\r\n\r\n**Link:** https:\/\/huggingface.co\/datasets\/ag_news\r\n\r\nHi there, the `ag_news` dataset viewer is not working.\r\n\r\nAm I the one who added this dataset? 
No\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3298\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3298\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3297","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3297\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3297\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3297\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3297","id":1058263859,"node_id":"I_kwDODunzps4_E9Mz","number":3297,"title":".map() cache is wrongfully reused - only happens when the mapping function is imported","user":{"login":"eladsegal","id":13485709,"node_id":"MDQ6VXNlcjEzNDg1NzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13485709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eladsegal","html_url":"https:\/\/github.com\/eladsegal","followers_url":"https:\/\/api.github.com\/users\/eladsegal\/followers","following_url":"https:\/\/api.github.com\/users\/eladsegal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eladsegal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eladsegal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eladsegal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eladsegal\/orgs","repos_url":"https:\/\/api.github.com\/users\/eladsegal\/repos","events_url":"https:\/\/api.github.com\/users\/eladsegal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eladsegal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! Thanks for reporting. Indeed this is a current limitation of the usage we have of `dill` in `datasets`. I'd suggest you use your workaround for now until we find a way to fix this. Maybe functions that are not coming from a module not installed with pip should be dumped completely, rather than only taking their locations into account","I agree. 
Sounds like a solution for it would be pretty dirty, even [cloudpickle](https:\/\/stackoverflow.com\/a\/16891169) doesn't help in this case.\r\nIn the meanwhile I think that adding a warning and the workaround somewhere in the documentation can be helpful."],"created_at":1637309916000,"updated_at":1638834340000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\nWhen `.map` is used with a mapping function that is imported, the cache is reused even if the mapping function has been modified.\r\nThe reason for this is that `dill` that is used for creating the fingerprint [pickles imported functions by reference](https:\/\/stackoverflow.com\/a\/67851411).\r\n\r\nI guess it is not a widespread case, but it can still lead to unwanted results unnoticeably. \r\n\r\n## Steps to reproduce the bug\r\nCreate files `a.py` and `b.py`:\r\n```python\r\n# a.py\r\nfrom datasets import load_dataset\r\n\r\ndef main():\r\n squad = load_dataset(\"squad\")\r\n squad.map(mapping_func, batched=True)\r\n\r\ndef mapping_func(examples):\r\n ID_LENGTH = 4\r\n examples[\"id\"] = [id_[:ID_LENGTH] for id_ in examples[\"id\"]]\r\n return examples\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n```\r\n```python\r\n# b.py\r\nfrom datasets import load_dataset\r\nfrom a import mapping_func\r\n\r\ndef main():\r\n squad = load_dataset(\"squad\")\r\n squad.map(mapping_func, batched=True)\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n```\r\nRun `python b.py` twice: In the first run you will see tqdm bars showing that the data is processed, and in the second run you will see \"Loading cached processed dataset at...\".\r\nNow change `ID_LENGTH` to another number in order to change the mapping function, and run `python b.py` again. You'll see that `.map` loads from the cache the result of the previous mapping function.\r\n\r\n## Expected results\r\nRun `python a.py` twice: In the first run you will see tqdm bars showing that the data is processed, and in the second run you will see \"Loading cached processed dataset at...\".\r\nNow change `ID_LENGTH` to another number in order to change the mapping function, and run `python a.py` again. You'll see that the dataset is being processed and that there's no reuse of the previous mapping function result.\r\n\r\n## Workaround\r\nPut the mapping function inside a dummy class as a static method:\r\n```python\r\n# a.py\r\nclass MappingFuncClass:\r\n @staticmethod\r\n def mapping_func(examples):\r\n ID_LENGTH = 4\r\n examples[\"id\"] = [id_[:ID_LENGTH] for id_ in examples[\"id\"]]\r\n return examples\r\n```\r\n```python\r\n# b.py\r\nfrom datasets import load_dataset\r\nfrom a import MappingFuncClass\r\n\r\ndef main():\r\n squad = load_dataset(\"squad\")\r\n squad.map(MappingFuncClass.mapping_func, batched=True)\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-4.4.0-19041-Microsoft-x86_64-with-glibc2.17\r\n- Python version: 3.8.10\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3297\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3297\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3296","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3296\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3296\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3296\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3296","id":1057970638,"node_id":"PR_kwDODunzps4uvlQz","number":3296,"title":"Fix temporary dataset_path creation for URIs related to remote fs","user":{"login":"francisco-perez-sorrosal","id":918006,"node_id":"MDQ6VXNlcjkxODAwNg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/918006?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal","html_url":"https:\/\/github.com\/francisco-perez-sorrosal","followers_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/followers","following_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/orgs","repos_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/repos","events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! Thanks for the fix :) \r\n\r\nI think this should be `extract_path_from_uri` 's responsibility to strip the extra `\/` from a badly formatted path like `hdfs:\/\/\/absolute\/path` (or raise an error). Do you think you can simply do the changes in `extract_path_from_uri` ? This way this fix will be available for all the other parts of the lib that need to extract the inner path from an URI of a remote filesystem\r\n\r\nThen we can also keep your test cases but simply apply them to `extract_path_from_uri` instead","Hi @lhoestq! No problem! Thanks for your interest! :)\r\n\r\nI think stripping the 3rd `\/` in `hdfs:\/\/\/absolute\/path` inside `extract_path_from_uri` is not the solution. 
When I provide `hdfs:\/\/\/absolute\/path` to `extract_path_from_uri`, we want `\/absolute\/path` to be returned, as happens now (at least for URIs with the `hdfs` scheme; `s3` is different, as the path should start with a bucket name).\r\n\r\nThe problem comes in line 1041 in the original code below:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/42f6b1d18a4a1b6009b6e62d115491be16dfca22\/src\/datasets\/arrow_dataset.py#L1038-L1042\r\n\r\nLet's assume the following parameters for line 1041 after `extract_path_from_uri` has removed the `hdfs` schema part and the `:\/\/` from `hdfs:\/\/\/absolute\/path`, and `get_temporary_cache_files_directory()` returns `\/tmp\/a1b2b3c4`, as shown below: \r\n\r\n```python\r\nsrc_dataset_path = '\/absolute\/path'\r\ntmp_dir = '\/tmp\/a1b2b3c4'\r\ndataset_path = Path(tmp_dir, src_dataset_path)\r\n```\r\n\r\nAfter passing those paths to the `Path` object, `dataset_path` contains only `\/absolute\/path`; that is, it has lost the temporary directory path. This is because, when two (or more) absolute paths are passed to the `Path` function, only the last one is kept. However, if the contents of those variables are:\r\n\r\n```python\r\nsrc_dataset_path = 'relative\/path'\r\ntmp_dir = '\/tmp\/a1b2b3c4'\r\ndataset_path = Path(tmp_dir, src_dataset_path)\r\n```\r\n\r\nthen `dataset_path` contains `\/tmp\/a1b2b3c4\/relative\/path` as expected.\r\n\r\nAbsolute paths are allowed in hdfs URIs, which is why I added the extra function `build_local_temp_path` in the PR: in case the second argument is an absolute path, it will still create the correct absolute path by converting that argument to a relative path and concatenating it to the temp dir (and it also works for Windows paths). It also allows adding tests that check that the main combinations are ok.\r\n\r\n
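For reference, a rough sketch of the idea behind `build_local_temp_path` (the exact signature and details here are assumptions; the real code is in this PR's diff):\r\n\r\n```python\r\nfrom pathlib import Path, PurePath\r\n\r\ndef build_local_temp_path(tmp_dir, src_dataset_path):\r\n    # Sketch: drop the root anchor from absolute paths so that joining\r\n    # with the temp dir never discards the temp-dir prefix.\r\n    relative = PurePath(src_dataset_path)\r\n    if relative.is_absolute():\r\n        relative = relative.relative_to(relative.anchor)\r\n    return Path(tmp_dir, relative)\r\n\r\n# build_local_temp_path('\/tmp\/a1b2b3c4', '\/absolute\/path')\r\n# -> PosixPath('\/tmp\/a1b2b3c4\/absolute\/path')\r\n```\r\n\r\nI've checked all the places where the result of `extract_path_from_uri` is used, and as far as I've seen this is the only place where it is concatenated with another possibly absolute path, so there is no need to add `build_local_temp_path` anywhere else.\r\n"],"created_at":1637278365000,"updated_at":1638787504000,"closed_at":1638787504000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"This aims to close #3295","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3296\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3296\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3296","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3296","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3296.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3296.patch","merged_at":1638787503000},"is_pull_request":true}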
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3295","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3295\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3295\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3295\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3295","id":1057954892,"node_id":"I_kwDODunzps4_DxxM","number":3295,"title":"Temporary dataset_path for remote fs URIs not built properly in arrow_dataset.py::load_from_disk","user":{"login":"francisco-perez-sorrosal","id":918006,"node_id":"MDQ6VXNlcjkxODAwNg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/918006?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal","html_url":"https:\/\/github.com\/francisco-perez-sorrosal","followers_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/followers","following_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/orgs","repos_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/repos","events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! 
Good catch and thanks for opening a PR :)\r\n\r\nI just responded in your PR"],"created_at":1637277842000,"updated_at":1638787504000,"closed_at":1638787504000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\nWhen trying to build a temporary dataset path from a remote URI in this block of code:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/42f6b1d18a4a1b6009b6e62d115491be16dfca22\/src\/datasets\/arrow_dataset.py#L1038-L1042\r\n\r\nthe result is not the expected one when passing an absolute path in a URI like `hdfs:\/\/\/absolute\/path`.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom pathlib import Path\r\n\r\n# Import locations as of `datasets` 1.13\r\nfrom datasets.filesystems import extract_path_from_uri\r\nfrom datasets.fingerprint import get_temporary_cache_files_directory\r\n\r\ndataset_path = \"hdfs:\/\/\/absolute\/path\"\r\nsrc_dataset_path = extract_path_from_uri(dataset_path)\r\ntmp_dir = get_temporary_cache_files_directory()\r\ndataset_path = Path(tmp_dir, src_dataset_path)\r\nprint(dataset_path)\r\n```\r\n\r\n## Expected results\r\nWith the code above, we would expect a value in `dataset_path` similar to:\r\n`\/tmp\/tmpnwxyvao5\/absolute\/path`\r\n\r\n## Actual results\r\nHowever, we get a `dataset_path` value like:\r\n`\/absolute\/path`\r\n\r\nThis is because this line: https:\/\/github.com\/huggingface\/datasets\/blob\/42f6b1d18a4a1b6009b6e62d115491be16dfca22\/src\/datasets\/arrow_dataset.py#L1041\r\nreturns only the last absolute path when two absolute paths (the one in `tmp_dir` and the one extracted from the URI in `src_dataset_path`) are passed as arguments.\r\n\r\n## Environment info\r\n- `datasets` version: 1.13.3\r\n- Platform: Linux-3.10.0-1160.15.2.el7.x86_64-x86_64-with-glibc2.33\r\n- Python version: 3.9.7\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3295\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3295\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3294","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3294\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3294\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3294\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3294","id":1057495473,"node_id":"I_kwDODunzps4_CBmx","number":3294,"title":"Add Natural Adversarial Objects dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637249684000,"updated_at":1638964802000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Adding a Dataset\r\n- **Name:** Natural Adversarial Objects (NAO)\r\n- **Description:** Natural Adversarial Objects (NAO) is a new dataset to evaluate the robustness of object detection models. 
NAO contains 7,934 images and 9,943 objects that are unmodified and representative of real-world scenarios, but cause state-of-the-art detection models to misclassify with high confidence.\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2111.04204v1\r\n- **Data:** https:\/\/drive.google.com\/drive\/folders\/15P8sOWoJku6SSEiHLEts86ORfytGezi8\r\n- **Motivation:** Interesting object detection dataset, useful for studying misclassifications\r\n\r\ncc @NielsRogge \r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3294\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3294\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3293","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3293\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3293\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3293\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3293","id":1057004431,"node_id":"PR_kwDODunzps4uslLN","number":3293,"title":"Pin version exclusion for Markdown","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637218561000,"updated_at":1637231285000,"closed_at":1637231284000,"author_association":"MEMBER","active_lock_reason":null,"body":"As Markdown version 3.3.5 has a bug, it is better to exclude it in case the users have it previously installed in their environment.\r\n\r\nRelated to #3289, 
#3286.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3293\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3293\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3293","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3293","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3293.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3293.patch","merged_at":1637231284000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3292","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3292\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3292\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3292\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3292","id":1056962554,"node_id":"I_kwDODunzps4-__f6","number":3292,"title":"Not able to load 'wikipedia' dataset","user":{"login":"abhibisht89","id":13541524,"node_id":"MDQ6VXNlcjEzNTQxNTI0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13541524?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhibisht89","html_url":"https:\/\/github.com\/abhibisht89","followers_url":"https:\/\/api.github.com\/users\/abhibisht89\/followers","following_url":"https:\/\/api.github.com\/users\/abhibisht89\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhibisht89\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhibisht89\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhibisht89\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhibisht89\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhibisht89\/repos","events_url":"https:\/\/api.github.com\/users\/abhibisht89\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhibisht89\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! Indeed it looks like the code snippet on the Hugging face Hub doesn't show the second parameter\r\n\r\n\r\n\r\nThanks for reporting, I'm taking a look\r\n"],"created_at":1637214078000,"updated_at":1637340569000,"closed_at":1637340569000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nI am following the instruction for loading the wikipedia dataset using datasets. 
However, I am getting the error below.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"wikipedia\")\r\n```\r\n\r\n## Expected results\r\nThe `wikipedia` dataset loads without errors.\r\n\r\n## Actual results\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/datasets\/builder.py in _create_builder_config(self, name, custom_features, **config_kwargs)\r\n 339 \"Config name is missing.\"\r\n 340 \"\\nPlease pick one among the available configs: %s\" % list(self.builder_configs.keys())\r\n--> 341 + \"\\nExample of usage:\\n\\t`{}`\".format(example_of_usage)\r\n 342 )\r\n 343 builder_config = self.BUILDER_CONFIGS[0]\r\n\r\nValueError: Config name is missing.\r\nPlease pick one among the available configs: ['20200501.aa', '20200501.ab', '20200501.ace', '20200501.ady', '20200501.af', '20200501.ak', '20200501.als', '20200501.am', '20200501.an', '20200501.ang', '20200501.ar', '20200501.arc', '20200501.arz', '20200501.as', '20200501.ast', '20200501.atj', '20200501.av', '20200501.ay', '20200501.az', '20200501.azb', '20200501.ba', '20200501.bar', '20200501.bat-smg', '20200501.bcl', '20200501.be', '20200501.be-x-old', '20200501.bg', '20200501.bh', '20200501.bi', '20200501.bjn', '20200501.bm', '20200501.bn', '20200501.bo', '20200501.bpy', '20200501.br', '20200501.bs', '20200501.bug', '20200501.bxr', '20200501.ca', '20200501.cbk-zam', '20200501.cdo', '20200501.ce', '20200501.ceb', '20200501.ch', '20200501.cho', '20200501.chr', '20200501.chy', '20200501.ckb', '20200501.co', '20200501.cr', '20200501.crh', '20200501.cs', '20200501.csb', '20200501.cu', '20200501.cv', '20200501.cy', '20200501.da', '20200501.de', '20200501.din', '20200501.diq', '20200501.dsb', '20200501.dty', '20200501.dv', '20200501.dz', '20200501.ee', '20200501.el', '20200501.eml', '20200501.en', '20200501.eo', '20200501.es', '20200501.et', '20200501.eu', '20200501.ext', '20200501.fa', '20200501.ff', '20200501.fi', '20200501.fiu-vro', '20200501.fj', '20200501.fo', '20200501.fr', '20200501.frp', '20200501.frr', '20200501.fur', '20200501.fy', '20200501.ga', '20200501.gag', '20200501.gan', '20200501.gd', '20200501.gl', '20200501.glk', '20200501.gn', '20200501.gom', '20200501.gor', '20200501.got', '20200501.gu', '20200501.gv', '20200501.ha', '20200501.hak', '20200501.haw', '20200501.he', '20200501.hi', '20200501.hif', '20200501.ho', '20200501.hr', '20200501.hsb', '20200501.ht', '20200501.hu', '20200501.hy', '20200501.ia', '20200501.id', '20200501.ie', '20200501.ig', '20200501.ii', '20200501.ik', '20200501.ilo', '20200501.inh', '20200501.io', '20200501.is', '20200501.it', '20200501.iu', '20200501.ja', '20200501.jam', '20200501.jbo', '20200501.jv', '20200501.ka', '20200501.kaa', '20200501.kab', '20200501.kbd', '20200501.kbp', '20200501.kg', '20200501.ki', '20200501.kj', '20200501.kk', '20200501.kl', '20200501.km', '20200501.kn', '20200501.ko', '20200501.koi', '20200501.krc', '20200501.ks', '20200501.ksh', '20200501.ku', '20200501.kv', '20200501.kw', '20200501.ky', '20200501.la', '20200501.lad', '20200501.lb', '20200501.lbe', '20200501.lez', '20200501.lfn', '20200501.lg', '20200501.li', '20200501.lij', '20200501.lmo', '20200501.ln', '20200501.lo', '20200501.lrc', '20200501.lt', '20200501.ltg', '20200501.lv', '20200501.mai', '20200501.map-bms', '20200501.mdf', '20200501.mg', '20200501.mh', '20200501.mhr', '20200501.mi', '20200501.min', '20200501.mk', '20200501.ml', '20200501.mn', '20200501.mr', '20200501.mrj', '20200501.ms', '20200501.mt', '20200501.mus', '20200501.mwl', 
'20200501.my', '20200501.myv', '20200501.mzn', '20200501.na', '20200501.nah', '20200501.nap', '20200501.nds', '20200501.nds-nl', '20200501.ne', '20200501.new', '20200501.ng', '20200501.nl', '20200501.nn', '20200501.no', '20200501.nov', '20200501.nrm', '20200501.nso', '20200501.nv', '20200501.ny', '20200501.oc', '20200501.olo', '20200501.om', '20200501.or', '20200501.os', '20200501.pa', '20200501.pag', '20200501.pam', '20200501.pap', '20200501.pcd', '20200501.pdc', '20200501.pfl', '20200501.pi', '20200501.pih', '20200501.pl', '20200501.pms', '20200501.pnb', '20200501.pnt', '20200501.ps', '20200501.pt', '20200501.qu', '20200501.rm', '20200501.rmy', '20200501.rn', '20200501.ro', '20200501.roa-rup', '20200501.roa-tara', '20200501.ru', '20200501.rue', '20200501.rw', '20200501.sa', '20200501.sah', '20200501.sat', '20200501.sc', '20200501.scn', '20200501.sco', '20200501.sd', '20200501.se', '20200501.sg', '20200501.sh', '20200501.si', '20200501.simple', '20200501.sk', '20200501.sl', '20200501.sm', '20200501.sn', '20200501.so', '20200501.sq', '20200501.sr', '20200501.srn', '20200501.ss', '20200501.st', '20200501.stq', '20200501.su', '20200501.sv', '20200501.sw', '20200501.szl', '20200501.ta', '20200501.tcy', '20200501.te', '20200501.tet', '20200501.tg', '20200501.th', '20200501.ti', '20200501.tk', '20200501.tl', '20200501.tn', '20200501.to', '20200501.tpi', '20200501.tr', '20200501.ts', '20200501.tt', '20200501.tum', '20200501.tw', '20200501.ty', '20200501.tyv', '20200501.udm', '20200501.ug', '20200501.uk', '20200501.ur', '20200501.uz', '20200501.ve', '20200501.vec', '20200501.vep', '20200501.vi', '20200501.vls', '20200501.vo', '20200501.wa', '20200501.war', '20200501.wo', '20200501.wuu', '20200501.xal', '20200501.xh', '20200501.xmf', '20200501.yi', '20200501.yo', '20200501.za', '20200501.zea', '20200501.zh', '20200501.zh-classical', '20200501.zh-min-nan', '20200501.zh-yue', '20200501.zu']\r\nExample of usage:\r\n\t`load_dataset('wikipedia', '20200501.aa')`\r\n\r\nI think the other parameter is missing in the load_dataset function that is not shown in the instruction.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3292\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3292\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3291","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3291\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3291\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3291\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3291","id":1056689876,"node_id":"PR_kwDODunzps4urikR","number":3291,"title":"Use f-strings in the dataset scripts","user":{"login":"Carlosbogo","id":84228424,"node_id":"MDQ6VXNlcjg0MjI4NDI0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/84228424?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Carlosbogo","html_url":"https:\/\/github.com\/Carlosbogo","followers_url":"https:\/\/api.github.com\/users\/Carlosbogo\/followers","following_url":"https:\/\/api.github.com\/users\/Carlosbogo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Carlosbogo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Carlosbogo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Carlosbogo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Carlosbogo\/orgs","repos_url":"https:\/\/api.github.com\/users\/Carlosbogo\/repos","events_url":"https:\/\/api.github.com\/users\/Carlosbogo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Carlosbogo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637187619000,"updated_at":1637599216000,"closed_at":1637599216000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Uses f-strings to format the .py files in the dataset folder","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3291\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3291\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3291","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3291","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3291.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3291.patch","merged_at":1637599216000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3290","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3290\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3290\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3290\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3290","id":1056414856,"node_id":"PR_kwDODunzps4uqzcv","number":3290,"title":"Make several audio datasets streamable","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Reading FLAC (for `librispeech_asr`) works OK for me (`soundfile` version: `0.10.3`):\r\n```python\r\nIn [2]: ds = load_dataset(\"datasets\/librispeech_asr\/librispeech_asr.py\", \"clean\", streaming=True, split=\"train.100\")\r\n\r\nIn [3]: item = next(iter(ds))\r\n\r\nIn [4]: item.keys()\r\nOut[4]: dict_keys(['file', 'audio', 'text', 'speaker_id', 'chapter_id', 'id'])\r\n\r\nIn [5]: item[\"file\"]\r\nOut[5]: '374-180298-0000.flac'\r\n\r\nIn [6]: item[\"audio\"].keys()\r\nOut[6]: dict_keys(['path', 'array', 'sampling_rate'])\r\n\r\nIn [7]: item[\"audio\"][\"sampling_rate\"]\r\nOut[7]: 16000\r\n\r\nIn [8]: item[\"audio\"][\"path\"]\r\nOut[8]: '374-180298-0000.flac'\r\n\r\nIn [9]: item[\"audio\"][\"array\"].shape\r\nOut[9]: (232480,)\r\n```","Oh cool ! 
I think this might have come from an issue with my local `soundfile` installation then","I'll do `multilingual_librispeech` in a separate PR since it requires the data to be in another format (in particular, separating the train\/dev\/test splits into different files)"],"created_at":1637171021000,"updated_at":1637334538000,"closed_at":1637334537000,"author_association":"MEMBER","active_lock_reason":null,"body":"<s>Needs https:\/\/github.com\/huggingface\/datasets\/pull\/3129 to be merged first<\/s>\r\n\r\nMake those audio datasets streamable:\r\n- [x] common_voice\r\n- [x] openslr\r\n- [x] vivos\r\n- [x] librispeech_asr <s>(still has some issues to read FLAC)<\/s> *actually it's ok*\r\n- [ ] multilingual_librispeech <s>(yet to be converted)<\/s> *TODO in a separate PR*","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3290\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3290\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3290","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3290","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3290.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3290.patch","merged_at":1637334537000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3289","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3289\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3289\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3289\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3289","id":1056323715,"node_id":"PR_kwDODunzps4uqf79","number":3289,"title":"Unpin markdown for build_docs now that it's fixed","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637166173000,"updated_at":1637166189000,"closed_at":1637166188000,"author_association":"MEMBER","active_lock_reason":null,"body":"`markdown`'s bug has been fixed, so this PR reverts #3286 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3289\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3289\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3289","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3289","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3289.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3289.patch","merged_at":1637166188000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3288","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3288\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3288\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3288\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3288","id":1056145703,"node_id":"PR_kwDODunzps4up6S5","number":3288,"title":"Allow datasets with indices table when concatenating along axis=1","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637156488000,"updated_at":1637163672000,"closed_at":1637163671000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Calls `flatten_indices` on the datasets with indices table in `concatenate_datasets` to fix issues when concatenating along `axis=1`.\r\n\r\n\r\ncc @lhoestq: I decided to flatten all the datasets instead of flattening all the datasets except the largest one in the end. The latter approach fails on the following example:\r\n```python\r\na = Dataset.from_dict({\"a\": [10, 20, 30, 40]})\r\nb = Dataset.from_dict({\"b\": [10, 20, 30, 40, 50, 60]}) # largest dataset\r\na = a.select([1, 2, 3])\r\nb = b.select([1, 2, 3])\r\nconcatenate_datasets([a, b], axis=1) # fails at line concat_tables(...) because the real length of b's data is 6 and a's length is 3 after flattening (was 4 before flattening)\r\n```\r\n\r\nAlso, it requires additional re-ordering of indices to prepare them for working with the indices table of the largest dataset. 
IMO not worth it when we save only one `flatten_indices` call (feel free to check the code of that approach at https:\/\/github.com\/huggingface\/datasets\/commit\/6acd10481c70950dcfdbfd2bab0bf0c74ad80bcb if you are interested).\r\n
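\r\nFor reference, a minimal sketch of the behavior this PR implements (flattening the indices mappings of all datasets before concatenating along `axis=1`; `flatten_indices` and `concatenate_datasets` are existing `datasets` APIs):\r\n\r\n```python\r\nfrom datasets import Dataset, concatenate_datasets\r\n\r\na = Dataset.from_dict({\"a\": [10, 20, 30, 40]}).select([1, 2, 3])\r\nb = Dataset.from_dict({\"b\": [10, 20, 30, 40, 50, 60]}).select([1, 2, 3])\r\n\r\n# flatten_indices() materializes the selected rows, so both tables end up\r\n# with the same real length (3) and can be concatenated column-wise.\r\nab = concatenate_datasets([a.flatten_indices(), b.flatten_indices()], axis=1)\r\n```\r\n\r\nFixes #3273\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3288\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3288\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3288","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3288","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3288.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3288.patch","merged_at":1637163671000},"is_pull_request":true}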
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3287","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3287\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3287\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3287\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3287","id":1056079724,"node_id":"PR_kwDODunzps4upsWR","number":3287,"title":"Add The Pile dataset and PubMed Central subset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637152558000,"updated_at":1638372548000,"closed_at":1638372547000,"author_association":"MEMBER","active_lock_reason":null,"body":"Add:\r\n- The complete final version of The Pile dataset: \"all\" config\r\n- PubMed Central subset of The Pile: \"pubmed_central\" config\r\n\r\nClose #1675, close bigscience-workshop\/data_tooling#74.\r\n\r\nCC: @StellaAthena, @lewtun 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3287\/reactions","total_count":4,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":4,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3287\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3287","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3287","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3287.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3287.patch","merged_at":1638372546000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3286","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3286\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3286\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3286\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3286","id":1056008586,"node_id":"PR_kwDODunzps4updTK","number":3286,"title":"Fix build_docs CI","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637147936000,"updated_at":1637147960000,"closed_at":1637147959000,"author_association":"MEMBER","active_lock_reason":null,"body":"Because of https:\/\/github.com\/Python-Markdown\/markdown\/issues\/1196 we have to temporarily pin `markdown` to 3.3.4 for the docs to build without issues","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3286\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3286\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3286","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3286","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3286.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3286.patch","merged_at":1637147959000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3285","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3285\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3285\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3285\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3285","id":1055506730,"node_id":"I_kwDODunzps4-6cEq","number":3285,"title":"Add IEMOCAP dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637102840000,"updated_at":1638964664000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Adding a Dataset\r\n- **Name:** IEMOCAP\r\n- **Description:** acted, multimodal and multispeaker database\r\n- **Paper:** https:\/\/sail.usc.edu\/iemocap\/Busso_2008_iemocap.pdf\r\n- **Data:** https:\/\/sail.usc.edu\/iemocap\/index.html\r\n- **Motivation:** 
Useful multimodal dataset\r\n\r\ncc @anton-l \r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3285\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3285\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3284","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3284\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3284\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3284\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3284","id":1055502909,"node_id":"I_kwDODunzps4-6bI9","number":3284,"title":"Add VoxLingua107 dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"assignees":[{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["#self-assign"],"created_at":1637102648000,"updated_at":1638784185000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Adding a Dataset\r\n- **Name:** VoxLingua107\r\n- **Description:** VoxLingua107 is a speech dataset for training spoken language identification models. 
The dataset consists of short speech segments automatically extracted from YouTube videos and labeled according to the language of the video title and description, with some post-processing steps to filter out false positives.\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2011.12998\r\n- **Data:** http:\/\/bark.phon.ioc.ee\/voxlingua107\/\r\n- **Motivation:** Nice audio classification dataset\r\n\r\ncc @anton-l \r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3284\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3284\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3283","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3283\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3283\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3283\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3283","id":1055495874,"node_id":"I_kwDODunzps4-6ZbC","number":3283,"title":"Add Speech Commands dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"closed","locked":false,"assignee":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"assignees":[{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["#self-assign"],"created_at":1637102396000,"updated_at":1639132215000,"closed_at":1639132215000,"author_association":"NONE","active_lock_reason":null,"body":"## Adding a Dataset\r\n- **Name:** Speech commands\r\n- **Description:** A Dataset for Limited-Vocabulary Speech Recognition\r\n- **Paper:** https:\/\/arxiv.org\/abs\/1804.03209\r\n- **Data:** https:\/\/www.tensorflow.org\/datasets\/catalog\/speech_commands, Available:\r\nhttp:\/\/download.tensorflow.org\/data\/speech_commands_v0.02.tar.gz\r\n- **Motivation:** Nice dataset for audio classification training\r\n\r\ncc @anton-l \r\n\r\nInstructions to add a new dataset can be found 
[here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3283\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3283\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3282","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3282\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3282\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3282\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3282","id":1055054898,"node_id":"I_kwDODunzps4-4twy","number":3282,"title":"ConnectionError: Couldn't reach https:\/\/huggingface.co\/datasets\/oscar-corpus\/OSCAR-2109\/resolve\/main\/OSCAR-2109.py","user":{"login":"MinionAttack","id":10078549,"node_id":"MDQ6VXNlcjEwMDc4NTQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10078549?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MinionAttack","html_url":"https:\/\/github.com\/MinionAttack","followers_url":"https:\/\/api.github.com\/users\/MinionAttack\/followers","following_url":"https:\/\/api.github.com\/users\/MinionAttack\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MinionAttack\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MinionAttack\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MinionAttack\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MinionAttack\/orgs","repos_url":"https:\/\/api.github.com\/users\/MinionAttack\/repos","events_url":"https:\/\/api.github.com\/users\/MinionAttack\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MinionAttack\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! Thanks for reporting :)\r\nI think this is because the dataset is behind an access page. We can fix the dataset viewer\r\n\r\nIf you also have this error when you use the `datasets` library in python, you should probably pass `use_auth_token=True` to the `load_dataset()` function to use your account to access the dataset.","Ah ok, I didn't realise about the login page. 
I'll try `use_auth_token=True` and see if that solves it.\r\n\r\nRegards!","Hi, \r\n\r\nUsing `use_auth_token=True` and downloading the credentials with `huggingface-cli login` (stored in .huggingface\/token) solved the issue.\r\n\r\nShould I leave the issue open until you fix the Dataset viewer issue?","Cool ! Yes let's keep this issue open until the viewer is fixed - I'll close it when this is fixed. Thanks","The error I get when trying to load OSCAR 21.09 is this\r\n```\r\nConnectionError: Couldn't reach https:\/\/huggingface.co\/datasets\/oscar-corpus\/OSCAR-2109\/resolve\/main\/OSCAR-2109.py\r\n```\r\n\r\nThe URL I get in the browser is this\r\n```\r\nhttps:\/\/huggingface.co\/datasets\/oscar-corpus\/OSCAR-2109\/blob\/main\/OSCAR-2109.py\r\n```\r\n\r\nMaybe URL is the issue? (resolve vs blob)","> The error I get when trying to load OSCAR 21.09 is this\r\n> \r\n> ```\r\n> ConnectionError: Couldn't reach https:\/\/huggingface.co\/datasets\/oscar-corpus\/OSCAR-2109\/resolve\/main\/OSCAR-2109.py\r\n> ```\r\n> \r\n> The URL I get in the browser is this\r\n> \r\n> ```\r\n> https:\/\/huggingface.co\/datasets\/oscar-corpus\/OSCAR-2109\/blob\/main\/OSCAR-2109.py\r\n> ```\r\n> \r\n> Maybe URL is the issue? (resolve vs blob)\r\n\r\nYou need to download your login credentials. See `huggingface-cli login` documentation and when loading the dataset use `use_auth_token=True`:\r\n`\r\nload_dataset(corpus, language, split=None, use_auth_token=True, cache_dir=cache_folder)`"],"created_at":1637078719000,"updated_at":1638173849000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Dataset viewer issue for '*oscar-corpus\/OSCAR-2109*'\r\n\r\n**Link:** *[link to the dataset viewer page](https:\/\/huggingface.co\/datasets\/oscar-corpus\/OSCAR-2109)*\r\n\r\n*The dataset library cannot download any language from the oscar-corpus\/OSCAR-2109 dataset. By entering the URL in your browser I can access the file.*\r\n\r\n```\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/huggingface.co\/datasets\/oscar-corpus\/OSCAR-2109\/resolve\/main\/OSCAR-2109.py\r\n```\r\n\r\nAm I the one who added this dataset ? No\r\n\r\nUsing the older version of [OSCAR](https:\/\/huggingface.co\/datasets\/oscar) I don't have any issues downloading languages with the dataset library.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3282\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3282\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3281","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3281\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3281\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3281\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3281","id":1055018876,"node_id":"PR_kwDODunzps4umWZE","number":3281,"title":"[Datasets] Improve Covost 2","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["I am trying to use `load_dataset` with the French dataset(common voice corpus 1) which is downloaded from a common voice site and the target language is English (using colab)\r\n\r\nSteps I have followed:\r\n\r\n**1. untar:**\r\n`!tar xvzf fr.tar -C data_dir`\r\n\r\n**2. 
load data:**\r\n`load_dataset('covost2', 'fr_en', data_dir=\"\/content\/data_dir\")`\r\n\r\n0 rows are loading as shown below:\r\n```\r\nUsing custom data configuration fr_en-data_dir=%2Fcontent%2Fdata_dir\r\nReusing dataset covost2 (\/root\/.cache\/huggingface\/datasets\/covost2\/fr_en-data_dir=%2Fcontent%2Fdata_dir\/1.0.0\/bba950aae1ffa5a14b876b7e09c17b44de2c3cf60e7bd5d459640beffc78e35b)\r\n100%\r\n3\/3 [00:00<00:00, 54.98it\/s]\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['client_id', 'file', 'audio', 'sentence', 'translation', 'id'],\r\n num_rows: 0\r\n })\r\n validation: Dataset({\r\n features: ['client_id', 'file', 'audio', 'sentence', 'translation', 'id'],\r\n num_rows: 0\r\n })\r\n test: Dataset({\r\n features: ['client_id', 'file', 'audio', 'sentence', 'translation', 'id'],\r\n num_rows: 0\r\n })\r\n})\r\n```\r\n\r\nCan you please provide a working code example for loading the dataset?","Hi ! I think it only works with the subsets of Common Voice Corpus 4, not Common Voice Corpus 1"],"created_at":1637076739000,"updated_at":1643213826000,"closed_at":1637232244000,"author_association":"MEMBER","active_lock_reason":null,"body":"The manual data download instructions for Covost are currently quite confusing and not very user-friendly.\r\n\r\nCurrently the user has to:\r\n\r\n1. Go to the Common Voice website\r\n2. Find the correct dataset, which is **not** mentioned in the error message\r\n3. Download it\r\n4. Untar it\r\n5. Create a language id folder (why? this folder does not exist in the `.tar` downloaded file)\r\n6. Pass the folder containing the created language id folder\r\n\r\nThis PR improves this to:\r\n\r\n1. Go to the Common Voice website\r\n2. Find the correct dataset, which **is** mentioned in the error message\r\n3. Download it\r\n4. Untar it\r\n5. Pass the untarred folder\r\n\r\n**Note**: This PR is not at all time-critical ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3281\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3281\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3281","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3281","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3281.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3281.patch","merged_at":1637232244000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3280","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3280\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3280\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3280\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3280","id":1054766828,"node_id":"PR_kwDODunzps4ulgye","number":3280,"title":"Fix bookcorpusopen RAM usage","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637062072000,"updated_at":1637164408000,"closed_at":1637069670000,"author_association":"MEMBER","active_lock_reason":null,"body":"Each document is a full book, so the default arrow writer batch size of 10,000 is too big, and it can fill up RAM quickly before flushing the first batch on disk. I changed its batch size to 256 to use maximum 100MB of memory\r\n\r\nFix #3167.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3280\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3280\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3280","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3280","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3280.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3280.patch","merged_at":1637069670000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3279","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3279\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3279\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3279\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3279","id":1054711852,"node_id":"PR_kwDODunzps4ulVHe","number":3279,"title":"Minor Typo Fix - Precision to Recall","user":{"login":"SebastinSanty","id":13795788,"node_id":"MDQ6VXNlcjEzNzk1Nzg4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795788?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SebastinSanty","html_url":"https:\/\/github.com\/SebastinSanty","followers_url":"https:\/\/api.github.com\/users\/SebastinSanty\/followers","following_url":"https:\/\/api.github.com\/users\/SebastinSanty\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SebastinSanty\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SebastinSanty\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SebastinSanty\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SebastinSanty\/orgs","repos_url":"https:\/\/api.github.com\/users\/SebastinSanty\/repos","events_url":"https:\/\/api.github.com\/users\/SebastinSanty\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SebastinSanty\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637058742000,"updated_at":1637061483000,"closed_at":1637061482000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3279\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3279\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3279","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3279","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3279.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3279.patch","merged_at":1637061482000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3278","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3278\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3278\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3278\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3278","id":1054249463,"node_id":"PR_kwDODunzps4uj2EQ","number":3278,"title":"Proposed update to the documentation for WER","user":{"login":"wooters","id":2111202,"node_id":"MDQ6VXNlcjIxMTEyMDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2111202?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/wooters","html_url":"https:\/\/github.com\/wooters","followers_url":"https:\/\/api.github.com\/users\/wooters\/followers","following_url":"https:\/\/api.github.com\/users\/wooters\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/wooters\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/wooters\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/wooters\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/wooters\/orgs","repos_url":"https:\/\/api.github.com\/users\/wooters\/repos","events_url":"https:\/\/api.github.com\/users\/wooters\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/wooters\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1637018911000,"updated_at":1637061577000,"closed_at":1637061577000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"I wanted to submit a minor update to the description of WER for your consideration. 
\r\n\r\nBecause of the possibility of insertions, the numerator in the WER formula can be larger than N, so the value of WER can be greater than 1.0:\r\n\r\n```\r\n>>> from datasets import load_metric\r\n>>> metric = load_metric(\"wer\")\r\n>>> metric.compute(predictions=[\"hello how are you\"], references=[\"hello\"])\r\n3.0\r\n```\r\n\r\nand similarly from the underlying jiwer module's `wer` function:\r\n\r\n```\r\n>>> from jiwer import wer\r\n>>> wer(\"hello\", \"hello how are you\")\r\n3.0\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3278\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3278\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3278","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3278","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3278.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3278.patch","merged_at":1637061577000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3277","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3277\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3277\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3277\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3277","id":1054122656,"node_id":"PR_kwDODunzps4ujk11","number":3277,"title":"f-string formatting","user":{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hello @lhoestq, ```make style``` is applied as asked. 
:)"],"created_at":1637012225000,"updated_at":1637354408000,"closed_at":1637165918000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"**Fix #3257**\r\n\r\nReplaced _.format()_ and _%_ by f-strings in the following modules : \r\n- [x] **tests**\r\n- [x] **metrics**\r\n- [x] **benchmarks**\r\n- [x] **utils**\r\n- [x] **templates**\r\n- [x] **src\/Datasets\/\\*.py**\r\n\r\nModules in **_src\/Datasets\/_**: \r\n- [x] **commands**\r\n- [x] **features**\r\n- [x] **formatting**\r\n- [x] **io**\r\n- [x] **tasks**\r\n- [x] **utils**\r\n\r\n\r\nModule **datasets** will not be edited as asked by @mariosasko \r\n\r\n-A correction of the first PR (#3267)-\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3277\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3277\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3277","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3277","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3277.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3277.patch","merged_at":1637165918000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3276","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3276\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3276\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3276\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3276","id":1053793063,"node_id":"PR_kwDODunzps4uihih","number":3276,"title":"Update KILT metadata JSON","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636989925000,"updated_at":1637061719000,"closed_at":1637061718000,"author_association":"MEMBER","active_lock_reason":null,"body":"Fix #3265.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3276\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3276\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3276","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3276","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3276.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3276.patch","merged_at":1637061718000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3275","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3275\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3275\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3275\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3275","id":1053698898,"node_id":"PR_kwDODunzps4uiN9t","number":3275,"title":"Force data files extraction if download_mode='force_redownload'","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636984824000,"updated_at":1636987523000,"closed_at":1636987523000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Avoids weird issues when redownloading a dataset due to cached data not being fully updated.\r\n\r\nWith this change, issues #3122 and https:\/\/github.com\/huggingface\/datasets\/issues\/2956 (not a fix, but a workaround) can be fixed as follows:\r\n```python\r\ndset = load_dataset(..., 
download_mode=\"force_redownload\")\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3275\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3275\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3275","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3275","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3275.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3275.patch","merged_at":1636987523000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3274","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3274\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3274\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3274\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3274","id":1053689140,"node_id":"PR_kwDODunzps4uiL8-","number":3274,"title":"Fix some contact information formats","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["The CI fail are caused by some missing sections or tags, which is unrelated to this PR. 
Merging !"],"created_at":1636984234000,"updated_at":1636987435000,"closed_at":1636987434000,"author_association":"MEMBER","active_lock_reason":null,"body":"As reported in https:\/\/github.com\/huggingface\/datasets\/issues\/3188 some contact information are not displayed correctly.\r\nThis PR fixes this for CoNLL-2002 and some other datasets with the same issue","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3274\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3274\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3274","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3274","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3274.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3274.patch","merged_at":1636987434000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3273","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3273\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3273\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3273\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3273","id":1053554038,"node_id":"I_kwDODunzps4-y_V2","number":3273,"title":"Respect row ordering when concatenating datasets along axis=1","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636975634000,"updated_at":1637163671000,"closed_at":1637163671000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Currently, there is a bug when concatenating datasets along `axis=1` if more than one dataset has the `_indices` attribute defined. In that scenario, all indices mappings except the first one get ignored.\r\n\r\nA minimal reproducible example:\r\n```python\r\n>>> from datasets import Dataset, concatenate_datasets\r\n>>> a = Dataset.from_dict({\"a\": [30, 20, 10]})\r\n>>> b = Dataset.from_dict({\"b\": [2, 1, 3]})\r\n>>> d = concatenate_datasets([a.sort(\"a\"), b.sort(\"b\")], axis=1)\r\n>>> print(d[:3]) # expected: {'a': [10, 20, 30], 'b': [1, 2, 3]}\r\n{'a': [10, 20, 30], 'b': [3, 1, 2]}\r\n```\r\n\r\nI've noticed the bug while working on #3195. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3273\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3273\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3272","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3272\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3272\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3272\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3272","id":1053516479,"node_id":"I_kwDODunzps4-y2K_","number":3272,"title":"Make iter_archive work with ZIP files","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false},"assignees":[{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hello, is this issue open for any contributor ? can I work on it ?\r\n\r\n","Hi ! Sure this is open for any contributor. If you're interested feel free to self-assign this issue to you by commenting `#self-assign`. 
Then if you have any questions or if I can help, feel free to ping me.\r\n\r\nTo begin with, feel free to take a look at both implementations of `iter_archive` for local downloads and for data streaming:\r\n\r\nIn the `DownloadManager` for local downloads:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/dfa334bd8dc6cbc854b170379c7d2cb7e3d3fe4f\/src\/datasets\/utils\/download_manager.py#L218-L242\r\n\r\nIn the `StreamingDownloadManager` to stream the content of the archive directly from the remote file:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/dfa334bd8dc6cbc854b170379c7d2cb7e3d3fe4f\/src\/datasets\/utils\/streaming_download_manager.py#L502-L526\r\n\r\nNotice the call to `xopen` that opens and streams a file given either a URL or a local path :)","Okay thank you for the information. I will work on this :) ","#self-assign"],"created_at":1636973442000,"updated_at":1637798927000,"closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"body":"Currently users can use `dl_manager.iter_archive` in their dataset script to iterate over all the files of a TAR archive.\r\nIt would be nice if it could work with ZIP files too !","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3272\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3272\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3271","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3271\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3271\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3271\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3271","id":1053482919,"node_id":"PR_kwDODunzps4uhgi1","number":3271,"title":"Decode audio from remote","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636971956000,"updated_at":1637062558000,"closed_at":1637062558000,"author_association":"MEMBER","active_lock_reason":null,"body":"Currently the Audio feature type can only decode local audio files, not remote files.\r\n\r\nTo fix this I replaced `open` with our `xopen` functoin that is compatible with remote files in audio.py\r\n\r\ncc @albertvillanova @mariosasko ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3271\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3271\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3271","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3271","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3271.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3271.patch","merged_at":1637062558000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3270","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3270\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3270\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3270\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3270","id":1053465662,"node_id":"PR_kwDODunzps4uhcxm","number":3270,"title":"Add os.listdir for streaming","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636971244000,"updated_at":1636972023000,"closed_at":1636972023000,"author_association":"MEMBER","active_lock_reason":null,"body":"Extend `os.listdir` to support streaming data from remote files. This is often used to navigate in remote ZIP files for example","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3270\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3270\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3270","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3270","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3270.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3270.patch","merged_at":1636972022000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3269","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3269\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3269\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3269\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3269","id":1053218769,"node_id":"I_kwDODunzps4-xtfR","number":3269,"title":"coqa NonMatchingChecksumError","user":{"login":"ZhaofengWu","id":11954789,"node_id":"MDQ6VXNlcjExOTU0Nzg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11954789?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ZhaofengWu","html_url":"https:\/\/github.com\/ZhaofengWu","followers_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/followers","following_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/orgs","repos_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/repos","events_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi @ZhaofengWu, thanks for reporting.\r\n\r\nUnfortunately, I'm not able to reproduce your bug:\r\n```python\r\nIn [1]: from datasets import load_dataset\r\n\r\nIn [2]: ds = load_dataset(\"coqa\")\r\nDownloading: 3.82kB [00:00, 1.91MB\/s]\r\nDownloading: 1.79kB [00:00, 1.79MB\/s]\r\nUsing custom data configuration default\r\nDownloading and preparing dataset coqa\/default (download: 55.40 MiB, generated: 18.35 MiB, post-processed: Unknown size, total: 73.75 MiB) to .cache\\coqa\\default\\1.0.0\\553ce70bfdcd15ff4b5f4abc4fc2f37137139cde1f58f4f60384a53a327716f0...\r\nDownloading: 
100%|\u2588\u2588\u2588\u2588\u2588\u2026| 49.0M\/49.0M [00:06<00:00, 7.17MB\/s]\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2026| 9.09M\/9.09M [00:01<00:00, 6.08MB\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2026| 2\/2 [00:12<00:00, 6.48s\/it]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2026| 2\/2 [00:00<00:00, 333.26it\/s]\r\nDataset coqa downloaded and prepared to .cache\\coqa\\default\\1.0.0\\553ce70bfdcd15ff4b5f4abc4fc2f37137139cde1f58f4f60384a53a327716f0. Subsequent calls will reuse this data.\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2026| 2\/2 [00:00<00:00, 285.49it\/s]\r\n\r\nIn [3]: ds\r\nOut[3]:\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['source', 'story', 'questions', 'answers'],\r\n num_rows: 7199\r\n })\r\n validation: Dataset({\r\n features: ['source', 'story', 'questions', 'answers'],\r\n num_rows: 500\r\n })\r\n})\r\n```\r\n\r\nCould you please give more details about your development environment? You can run the command `datasets-cli env` and copy-and-paste its output:\r\n```\r\n- `datasets` version:\r\n- Platform:\r\n- Python version:\r\n- PyArrow version:\r\n```\r\nIt might be because you are using an old version of `datasets`. Could you please update it (`pip install -U datasets`) and confirm if the problem persists? ","I'm getting the same error in two separate environments:\r\n```\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-5.4.0-84-generic-x86_64-with-debian-bullseye-sid\r\n- Python version: 3.7.11\r\n- PyArrow version: 6.0.0\r\n```\r\n\r\n```\r\n- `datasets` version: 1.15.1\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.9.5\r\n- PyArrow version: 6.0.0\r\n```","I'm sorry, but I'm not able to reproduce the error in the Linux environment.\r\n\r\n@mariosasko @lhoestq can you reproduce it?","I also can't reproduce the error on Windows\/Linux (tested both the master and the `1.15.1` version). ","Maybe the file had issues during the download? Could you try to delete your cache and try again?\r\nBy default the downloads cache is at `~\/.cache\/huggingface\/datasets\/downloads`\r\n\r\nAlso, can you check if you have a proxy that could prevent the download from succeeding? Are you able to download those files via your browser?","I got the same error in a third environment (google cloud) as well. 
The internet connections for these three environments are all different, so I don't think that's the reason.\r\n```\r\n- `datasets` version: 1.12.1\r\n- Platform: Linux-5.11.0-1022-gcp-x86_64-with-glibc2.31\r\n- Python version: 3.9.7\r\n- PyArrow version: 6.0.0\r\n```\r\nI deleted the entire `~\/.cache\/huggingface\/datasets` on my local mac, and got a different error the first time.\r\n```\r\nPython 3.9.5 (default, May 18 2021, 12:31:01) \r\n[Clang 10.0.0 ] :: Anaconda, Inc. on darwin\r\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\r\n>>> from datasets import load_dataset\r\n>>> dataset = load_dataset(\"coqa\")\r\nDownloading: 3.82kB [00:00, 1.19MB\/s] \r\nDownloading: 1.79kB [00:00, 712kB\/s] \r\nUsing custom data configuration default\r\nDownloading and preparing dataset coqa\/default (download: 55.40 MiB, generated: 18.35 MiB, post-processed: Unknown size, total: 73.75 MiB) to \/Users\/zhaofengw\/.cache\/huggingface\/datasets\/coqa\/default\/1.0.0\/553ce70bfdcd15ff4b5f4abc4fc2f37137139cde1f58f4f60384a53a327716f0...\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2026| 222\/222 [00:00<00:00, 1.36MB\/s]\r\n 50%|\u2588\u2588\u2588\u2026\u258c | 1\/2 [00:00<00:00, 2.47it\/s]Traceback (most recent call last):\r\n File \"<stdin>\", line 1, in <module>\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 675, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/Users\/zhaofengw\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/coqa\/553ce70bfdcd15ff4b5f4abc4fc2f37137139cde1f58f4f60384a53a327716f0\/coqa.py\", line 70, in _split_generators\r\n downloaded_files = dl_manager.download_and_extract(urls_to_download)\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/utils\/download_manager.py\", line 284, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/utils\/download_manager.py\", line 196, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/utils\/py_utils.py\", line 216, in map_nested\r\n mapped = [\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/utils\/py_utils.py\", line 217, in <listcomp>\r\n _single_map_nested((function, obj, types, None, True))\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/utils\/py_utils.py\", line 152, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/utils\/download_manager.py\", line 217, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/utils\/file_utils.py\", line 295, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/utils\/file_utils.py\", line 594, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/nlp.stanford.edu\/data\/coqa\/coqa-dev-v1.0.json\r\n>>> dataset = load_dataset(\"coqa\")\r\nUsing custom data configuration default\r\nDownloading and preparing dataset coqa\/default (download: 55.40 MiB, generated: 18.35 MiB, post-processed: Unknown size, total: 73.75 MiB) to \/Users\/zhaofengw\/.cache\/huggingface\/datasets\/coqa\/default\/1.0.0\/553ce70bfdcd15ff4b5f4abc4fc2f37137139cde1f58f4f60384a53a327716f0...\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2026| 222\/222 [00:00<00:00, 1.38MB\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2026| 2\/2 [00:00<00:00, 6.26it\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2026| 2\/2 [00:00<00:00, 1087.45it\/s]\r\n 50%|\u2588\u2588\u2588\u2026\u258c | 1\/2 [00:45<00:45, 45.60s\/it]\r\nTraceback (most recent call last):\r\n File \"<stdin>\", line 1, in <module>\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 679, in _download_and_prepare\r\n verify_checksums(\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/utils\/info_utils.py\", line 40, in verify_checksums\r\n raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/nlp.stanford.edu\/data\/coqa\/coqa-train-v1.0.json', 'https:\/\/nlp.stanford.edu\/data\/coqa\/coqa-dev-v1.0.json']\r\n```\r\nI can access the URL using my browser, though I did notice a redirection -- could that have something to do with it?","Hi @ZhaofengWu, \r\n\r\nWhat about in Google Colab? Can you run this notebook without errors? \r\nhttps:\/\/colab.research.google.com\/drive\/1CCpiiHmtNlfO_4CZ3-fW-TSShr1M0rL4?usp=sharing","I can run your notebook fine, but if I create one myself, it has that error: https:\/\/colab.research.google.com\/drive\/107GIdhrauPO6ZiFDY7G9S74in4qqI2Kx?usp=sharing.\r\n\r\nIt's so funny -- it's like whenever you guys run it it's fine, but whenever I run it it fails, whatever the environment is.","I guess it must be some connection issue: the data owner may be blocking requests coming from your country or IP range...","I mean, I don't think Google Colab sends the connection from my IP. The same applies to Google Cloud.","Hello, I am having the same error as @ZhaofengWu, first with the \"social bias frames\" dataset. As I found this report, I also tried \"coqa\" and it fails as well. \r\n\r\nI tested this on Google Colab. \r\n\r\n```\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-5.4.104+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.12\r\n- PyArrow version: 3.0.0\r\n```\r\n\r\nThen another environment:\r\n\r\n```\r\n- `datasets` version: 1.15.1\r\n- Platform: macOS-12.0.1-arm64-arm-64bit\r\n- Python version: 3.9.7\r\n- PyArrow version: 6.0.1\r\n```\r\n\r\nI tried the notebook @albertvillanova provided earlier, and it fails...\r\n","Hi, I'm still not able to reproduce the issue with `coqa`. If you still have this issue, could you please run these additional commands?\r\n```python\r\n>>> import os\r\n>>> from hashlib import md5\r\n>>> from datasets.utils import DownloadManager, DownloadConfig\r\n>>> path = DownloadManager(download_config=DownloadConfig(use_etag=False)).download(\"https:\/\/nlp.stanford.edu\/data\/coqa\/coqa-dev-v1.0.json\") # it returns the cached file\r\n>>> os.path.getsize(path)\r\n9090845\r\n>>> m = md5()\r\n>>> m.update(open(path, \"rb\").read())\r\n>>> m.hexdigest()\r\n'95d427588e3733e4ebec55f6938dbba6'\r\n>>> open(path).read(500)\r\n'{\\n \"version\": \"1.0\",\\n \"data\": [\\n {\\n \"source\": \"mctest\",\\n \"id\": \"3dr23u6we5exclen4th8uq9rb42tel\",\\n \"filename\": \"mc160.test.41\",\\n \"story\": \"Once upon a time, in a barn near a farm house, there lived a little white kitten named Cotton. Cotton lived high up in a nice warm place above the barn where all of the farmer\\'s horses slept. But Cotton wasn\\'t alone in her little home above the barn, oh no. She shared her hay bed with her mommy and 5 other sisters. 
All of her sisters w'\r\n```\r\n\r\nThis way we can know whether you downloaded a corrupted file or an error page that could cause the `NonMatchingChecksumError` to happen","```\r\n>>> import os\r\n>>> from hashlib import md5\r\n>>> from datasets.utils import DownloadManager, DownloadConfig\r\n>>> path = DownloadManager(download_config=DownloadConfig(use_etag=False)).download(\"https:\/\/nlp.stanford.edu\/data\/coqa\/coqa-dev-v1.0.json\") # it returns the cached file\r\n>>> os.path.getsize(path)\r\n222\r\n>>> m = md5()\r\n>>> m.update(open(path, \"rb\").read())\r\n>>> m.hexdigest()\r\n'1195812a37c01a4481a4748c85d0c6a9'\r\n>>> open(path).read(500)\r\n'\\n<html>\\n<head><title>503 Service Temporarily Unavailable<\/title><\/head>\\n<body>\\n<center><h1>503 Service Temporarily Unavailable<\/h1><\/center>\\n<hr><center>nginx\/1.10.3 (Ubuntu)<\/center>\\n<\/body>\\n<\/html>\\n'\r\n```\r\nLooks like there was a server-side error when downloading the dataset? But I don't believe this is a transient error, given that (a) deleting the cache and re-downloading gives the same error; (b) it happens on multiple platforms with different network configurations; (c) other people are getting this error too, see above. So I'm not sure why it works for some people but not others.","`wget https:\/\/nlp.stanford.edu\/data\/coqa\/coqa-dev-v1.0.json` does work. So I suspect there might be some problem in `datasets`' networking code? Can you give me some snippet that simulates how `datasets` requests the resource, which I can run on my end?","There is a redirection -- I don't know if that's the cause.","OK, this is an issue with the server that hosts the data at `https:\/\/nlp.stanford.edu\/nlp\/data`: it randomly returns 503 (trying several times, it also happens on my side). Hopefully it can be fixed soon. I'll try to reach the people in charge of hosting the data.","Thanks. Also, it might help to display a more informative error message?","You're right. I just opened a PR that would show this error if it happens again:\r\n```python\r\nConnectionError: Couldn't reach https:\/\/nlp.stanford.edu\/data\/coqa\/coqa-dev-v1.0.json (error 503)\r\n```"],"created_at":1636952647000,"updated_at":1642600699000,"closed_at":1642600699000,"author_association":"NONE","active_lock_reason":null,"body":"```\r\n>>> from datasets import load_dataset\r\n>>> dataset = load_dataset(\"coqa\")\r\nDownloading: 3.82kB [00:00, 1.26MB\/s] \r\nDownloading: 1.79kB [00:00, 733kB\/s] \r\nUsing custom data configuration default\r\nDownloading and preparing dataset coqa\/default (download: 55.40 MiB, generated: 18.35 MiB, post-processed: Unknown size, total: 73.75 MiB) to \/Users\/zhaofengw\/.cache\/huggingface\/datasets\/coqa\/default\/1.0.0\/553ce70bfdcd15ff4b5f4abc4fc2f37137139cde1f58f4f60384a53a327716f0...\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2026| 222\/222 [00:00<00:00, 1.38MB\/s]\r\nDownloading: 
100%|\u2588\u2588\u2588\u2588\u2588\u2026| 222\/222 [00:00<00:00, 1.32MB\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2026| 2\/2 [00:01<00:00, 1.91it\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2026| 2\/2 [00:00<00:00, 1117.44it\/s]\r\nTraceback (most recent call last):\r\n File \"<stdin>\", line 1, in <module>\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 679, in _download_and_prepare\r\n verify_checksums(\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/utils\/info_utils.py\", line 40, in verify_checksums\r\n raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/nlp.stanford.edu\/data\/coqa\/coqa-train-v1.0.json', 'https:\/\/nlp.stanford.edu\/data\/coqa\/coqa-dev-v1.0.json']\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3269\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3269\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
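One hedged takeaway from this thread: since the server intermittently returns a small nginx 503 page that then gets cached as if it were the data, a workaround is to force a fresh download and verify the file before trusting it. The expected MD5 below is the value the maintainer quotes above for `coqa-dev-v1.0.json`; treating `force_download` as available on `DownloadConfig` is an assumption about the version in use:

```python
from hashlib import md5
from datasets.utils import DownloadManager, DownloadConfig

URL = "https://nlp.stanford.edu/data/coqa/coqa-dev-v1.0.json"
EXPECTED_MD5 = "95d427588e3733e4ebec55f6938dbba6"  # value reported in the thread above

def download_verified(url: str, expected_md5: str, retries: int = 3) -> str:
    for _ in range(retries):
        # force_download bypasses a possibly poisoned cache entry (assumed option)
        dl_manager = DownloadManager(
            download_config=DownloadConfig(use_etag=False, force_download=True)
        )
        path = dl_manager.download(url)
        if md5(open(path, "rb").read()).hexdigest() == expected_md5:
            return path
        # A tiny file landing here is likely the cached nginx 503 page; retry
    raise RuntimeError(f"Could not fetch an intact copy of {url}")

path = download_verified(URL, EXPECTED_MD5)
```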
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3268","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3268\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3268\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3268\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3268","id":1052992681,"node_id":"I_kwDODunzps4-w2Sp","number":3268,"title":"Dataset viewer issue for 'liweili\/c4_200m'","user":{"login":"liliwei25","id":22389228,"node_id":"MDQ6VXNlcjIyMzg5MjI4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22389228?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/liliwei25","html_url":"https:\/\/github.com\/liliwei25","followers_url":"https:\/\/api.github.com\/users\/liliwei25\/followers","following_url":"https:\/\/api.github.com\/users\/liliwei25\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/liliwei25\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/liliwei25\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/liliwei25\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/liliwei25\/orgs","repos_url":"https:\/\/api.github.com\/users\/liliwei25\/repos","events_url":"https:\/\/api.github.com\/users\/liliwei25\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/liliwei25\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on 
huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"assignees":[{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! I think the issue comes from this [line](https:\/\/huggingface.co\/datasets\/liweili\/c4_200m\/blob\/main\/c4_200m.py#L87):\r\n```python\r\npath = filepath + \"\/*.tsv*\"\r\n```\r\n\r\nYou can fix this by doing this instead:\r\n```python\r\npath = os.path.join(filepath, \"\/*.tsv*\")\r\n```\r\n\r\nHere is why:\r\n\r\nLocally you can append `\"\/*.tsv*\"` to your local path, however it doesn't work in streaming mode, and the dataset viewer does use the streaming mode.\r\nIn streaming mode, the download and extract part is done lazily. 
It means that instead of using local paths, it's still passing around URLs and [chained URLs](https:\/\/filesystem-spec.readthedocs.io\/en\/latest\/features.html#url-chaining)\r\n\r\nTherefore in streaming mode, `filepath` is not a local path, but instead is equal to\r\n```python\r\nzip:\/\/::https:\/\/huggingface.co\/datasets\/liweili\/c4_200m\/resolve\/main\/data.zip\r\n```\r\nThe `zip:\/\/` part means that we navigate inside the remote ZIP file.\r\n\r\nYou must use `os.path.join` to navigate inside it and get your TSV files:\r\n```python\r\n>>> os.path.join(filepath, \"\/*.tsv*\")\r\nzip:\/\/*.tsv*::https:\/\/huggingface.co\/datasets\/liweili\/c4_200m\/resolve\/main\/data.zip\r\n```\r\n\r\n`datasets` extends `os.path.join`, `glob.glob`, etc. in your dataset scripts to work with remote files.","hi @lhoestq ! thanks for the tip! i've updated the line of code but it's still not working. am i doing something else wrong? thank you!","Hi ! Your dataset code is all good now :)\r\n```python\r\nIn [1]: from datasets import load_dataset\r\n\r\nIn [2]: d = load_dataset(\"liweili\/c4_200m\", streaming=True)\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2.79k\/2.79k [00:00<00:00, 4.83MB\/s]\r\nUsing custom data configuration default\r\n\r\nIn [3]: next(iter(d[\"train\"]))\r\nOut[3]: \r\n{'input': 'Bitcoin is for $7,094 this morning, which CoinDesk says.',\r\n 'output': 'Bitcoin goes for $7,094 this morning, according to CoinDesk.'}\r\n```\r\nThough the viewer doesn't seem to be updated, I'll take a look at what's wrong","thank you @lhoestq! \ud83d\ude04 ","It's working\r\n\r\n
\r\n\r\n"],"created_at":1636910326000,"updated_at":1640082320000,"closed_at":1640082291000,"author_association":"NONE","active_lock_reason":null,"body":"## Dataset viewer issue for '*liweili\/c4_200m*'\r\n\r\n**Link:** *[link to the dataset viewer page](https:\/\/huggingface.co\/datasets\/liweili\/c4_200m)*\r\n\r\n*Server Error*\r\n```\r\nStatus code: 404\r\nException: Status404Error\r\nMessage: Not found. Maybe the cache is missing, or maybe the ressource does not exist.\r\n```\r\n\r\nAm I the one who added this dataset ? Yes\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3268\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3268\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3267","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3267\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3267\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3267\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3267","id":1052750084,"node_id":"PR_kwDODunzps4ufQzB","number":3267,"title":"Replacing .format() and % by f-strings","user":{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! It looks like most of your changes are just `black` changes. All those changes are not necessary. In particular if you want to use `black`, please use the `make style` command instead. It runs `black` with additional parameters and you shouldn't end up with that many changes\r\n\r\nFeel free to open a new PR that doesn't include all the unnecessary `black` changes that you have on your branch :)","> Hi ! It looks like most of your changes are just `black` changes. All those changes are not necessary. In particular if you want to use `black`, please use the `make style` command instead. 
It runs `black` with additional parameters and you shouldn't end up with that many changes\r\n> \r\n> Feel free to open a new PR that doesn't include all the unnecessary `black` changes that you have on your branch :)\r\n\r\nThank you for your answer :). I will open a new PR with the correct changes.","Hi @lhoestq, I submitted 3 commits in a new PR (#3277) where I did not apply black.\r\n\r\nI can apply the `make style` command if asked.","Cool, thanks! Yes, feel free to make sure you have `black==21.4b0` and run `make style`"],"created_at":1636830722000,"updated_at":1637096426000,"closed_at":1637074543000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"**Fix #3257**\r\n\r\nReplaced _.format()_ and _%_ with f-strings in the following modules:\r\n- [x] **tests**\r\n- [x] **metrics**\r\n- [x] **benchmarks**\r\n- [x] **utils**\r\n- [x] **templates**\r\n\r\nThe remaining module will follow in the next PR:\r\n- [ ] **src**\r\n\r\nModule **datasets** will not be edited, as asked by @mariosasko \r\n\r\nPS: black and isort applied to files\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3267\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3267\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3267","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3267","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3267.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3267.patch","merged_at":null},"is_pull_request":true}
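For context, the conversion this PR performs is mechanical; a representative before/after (the variable names are illustrative):

```python
name, count = "squad", 2

# before: percent formatting and str.format
message = "%s has %d splits" % (name, count)
message = "{} has {} splits".format(name, count)

# after: the equivalent f-string
message = f"{name} has {count} splits"
```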
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3266","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3266\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3266\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3266\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3266","id":1052700155,"node_id":"PR_kwDODunzps4ufH94","number":3266,"title":"Fix URLs for WikiAuto Manual, jeopardy and definite_pronoun_resolution","user":{"login":"LashaO","id":28014149,"node_id":"MDQ6VXNlcjI4MDE0MTQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28014149?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LashaO","html_url":"https:\/\/github.com\/LashaO","followers_url":"https:\/\/api.github.com\/users\/LashaO\/followers","following_url":"https:\/\/api.github.com\/users\/LashaO\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LashaO\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LashaO\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LashaO\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LashaO\/orgs","repos_url":"https:\/\/api.github.com\/users\/LashaO\/repos","events_url":"https:\/\/api.github.com\/users\/LashaO\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LashaO\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["There seems to be problems with datasets metadata, of which I dont have access to. I think one of the datasets is from reddit. Can anyone help?","Hello @LashaO , I think the errors were caused by `_DATA_FILES` in `definite_pronoun_resolution.py`. Here are details of the test error.\r\n```\r\nself = BuilderConfig(name='plain_text', version=1.0.0, data_dir=None, data_files={'train': 'train.c.txt', 'test': 'test.c.txt'}, description='Plain text import of the Definite Pronoun Resolution Dataset.')\r\n\r\n def __post_init__(self):\r\n # The config name is used to name the cache directory.\r\n invalid_windows_characters = r\"<>:\/\\|?*\"\r\n for invalid_char in invalid_windows_characters:\r\n if invalid_char in self.name:\r\n raise InvalidConfigName(\r\n f\"Bad characters from black list '{invalid_windows_characters}' found in '{self.name}'. 
\"\r\n f\"They could create issues when creating a directory for this config on Windows filesystem.\"\r\n )\r\n if self.data_files is not None and not isinstance(self.data_files, DataFilesDict):\r\n> raise ValueError(f\"Expected a DataFilesDict in data_files but got {self.data_files}\")\r\nE ValueError: Expected a DataFilesDict in data_files but got {'train': 'train.c.txt', 'test': 'test.c.txt'}\r\n```","Hi ! Thanks for the fixes :)\r\n\r\nInstead of uploading the `definite_pronoun_resolution` data files in this PR, maybe we can just update the URL ?\r\nThe old url was http:\/\/www.hlt.utdallas.edu\/~vince\/data\/emnlp12\/train.c.txt, but now it's https:\/\/www.hlt.utdallas.edu\/~vince\/data\/emnlp12\/train.c.txt (https instead of http)","Actually the bad certificate creates an issue with the download\r\n```python\r\nimport datasets \r\ndatasets.DownloadManager().download(\"https:\/\/www.hlt.utdallas.edu\/~vince\/data\/emnlp12\/train.c.txt\")\r\n# raises: ConnectionError: Couldn't reach https:\/\/www.hlt.utdallas.edu\/~vince\/data\/emnlp12\/train.c.txt\r\n```\r\n\r\nLet me see if I can fix that","I uploaded them to these URLs, feel free to use them instead of having the text files here in the PR :)\r\nhttps:\/\/s3.amazonaws.com\/datasets.huggingface.co\/definite_pronoun_resolution\/train.c.txt\r\nhttps:\/\/s3.amazonaws.com\/datasets.huggingface.co\/definite_pronoun_resolution\/test.c.txt","Thank you for the tips! Having a busy week so anyone willing to commit the suggestions is welcome. Else, I will try to get back to this in a while.","@LashaO Thanks for working on this. Yes, I'll take over as we already have a request to fix the URL of the Jeopardy! dataset in a separate issue.","~~Still have to fix the error in the dummy data test of the WikiAuto dataset (so please don't merge).~~ Done! Ready for merging.","Thank you, Mario!","The CI failure is only related to missing tags in the dataset cards, merging :)"],"created_at":1636815694000,"updated_at":1638789391000,"closed_at":1638789391000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"[#3264](https:\/\/github.com\/huggingface\/datasets\/issues\/3264)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3266\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3266\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3266","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3266","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3266.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3266.patch","merged_at":1638789391000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3265","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3265\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3265\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3265\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3265","id":1052666558,"node_id":"I_kwDODunzps4-vmq-","number":3265,"title":"Checksum error for kilt_task_wow","user":{"login":"slyviacassell","id":22296717,"node_id":"MDQ6VXNlcjIyMjk2NzE3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22296717?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/slyviacassell","html_url":"https:\/\/github.com\/slyviacassell","followers_url":"https:\/\/api.github.com\/users\/slyviacassell\/followers","following_url":"https:\/\/api.github.com\/users\/slyviacassell\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/slyviacassell\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/slyviacassell\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/slyviacassell\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/slyviacassell\/orgs","repos_url":"https:\/\/api.github.com\/users\/slyviacassell\/repos","events_url":"https:\/\/api.github.com\/users\/slyviacassell\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/slyviacassell\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Using `dataset = load_dataset(\"kilt_tasks\", \"wow\", ignore_verifications=True)` may fix it, but I do not think it is a elegant solution.","Hi @slyviacassell, thanks for reporting.\r\n\r\nYes, there is an issue with the checksum verification. I'm fixing it.\r\n\r\nAnd as you pointed out, in the meantime, you can circumvent the problem by passing `ignore_verifications=True`. "],"created_at":1636805057000,"updated_at":1637061833000,"closed_at":1637061718000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nChecksum failed when downloads kilt_tasks_wow. 
See error output for details.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\ndatasets.load_datasets('kilt_tasks','wow')\r\n```\r\n\r\n## Expected results\r\nDownload successful\r\n\r\n## Actual results\r\n```\r\nDownloading and preparing dataset kilt_tasks\/wow (download: 72.07 MiB, generated: 61.82 MiB, post-processed: Unknown size, total: 133.89 MiB) to \/root\/.cache\/huggingface\/datasets\/kilt_tasks\/wow\/1.0.0\/57dc8b2431e76637e0c6ef79689ca4af61ed3a330e2e0cd62c8971465a35db3a...\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 3\/3 [00:00<00:00, 5121.25it\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 3\/3 [00:00<00:00, 1527.42it\/s]\r\nTraceback (most recent call last):\r\n File \"kilt_wow.py\", line 30, in \r\n main()\r\n File \"kilt_wow.py\", line 27, in main\r\n train, dev, test = dataset.generate_k_shot_data(k=32, seed=seed, path=\"..\/data\/\")\r\n File \"\/workspace\/projects\/CrossFit\/tasks\/fewshot_gym_dataset.py\", line 79, in generate_k_shot_data\r\n dataset = self.load_dataset()\r\n File \"kilt_wow.py\", line 21, in load_dataset\r\n return datasets.load_dataset('kilt_tasks','wow')\r\n File 
\"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 679, in _download_and_prepare\r\n verify_checksums(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/info_utils.py\", line 40, in verify_checksums\r\n raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['http:\/\/dl.fbaipublicfiles.com\/KILT\/wow-train-kilt.jsonl', 'http:\/\/dl.fbaipublicfiles.com\/KILT\/wow-dev-kilt.jsonl']\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-4.15.0-161-generic-x86_64-with-glibc2.10\r\n- Python version: 3.8.3\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3265\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3265\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3264","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3264\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3264\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3264\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3264","id":1052663513,"node_id":"I_kwDODunzps4-vl7Z","number":3264,"title":"Downloading URL change for WikiAuto Manual, jeopardy and definite_pronoun_resolution","user":{"login":"slyviacassell","id":22296717,"node_id":"MDQ6VXNlcjIyMjk2NzE3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22296717?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/slyviacassell","html_url":"https:\/\/github.com\/slyviacassell","followers_url":"https:\/\/api.github.com\/users\/slyviacassell\/followers","following_url":"https:\/\/api.github.com\/users\/slyviacassell\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/slyviacassell\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/slyviacassell\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/slyviacassell\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/slyviacassell\/orgs","repos_url":"https:\/\/api.github.com\/users\/slyviacassell\/repos","events_url":"https:\/\/api.github.com\/users\/slyviacassell\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/slyviacassell\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["#take\r\nI am willing to fix this. Links can be replaced for WikiAuto Manual and jeopardy with new ones provided by authors.\r\n\r\nAs for the definite_pronoun_resolution URL, a certificate error seems to be preventing a download. I have the files on my local machine. I can include them in the dataset folder as the files are <1MB in size total.","> #take I am willing to fix this. Links can be replaced for WikiAuto Manual and jeopardy.\r\n> \r\n> As for the definite_pronoun_resolution URL, a certificate error seems to be preventing a download. I have the files on my local machine. Anyone has opinions on whether it is preferable for me to host them somewhere (e.g. 
personal GDrive account) or upload them to the dataset folder directly and use github raw URLs? The files are <1MB in size.\r\n\r\nI am planning to fix it next few days. But my to-do list is full and I do not have the cache of definite_pronoun_resolution. I am glad that you can take this. Thanks a lot!","No problem, buddy! Will submit a PR over this weekend."],"created_at":1636804032000,"updated_at":1636810761000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\n- WikiAuto Manual \r\nThe original manual datasets with the following downloading URL in this [repository](https:\/\/github.com\/chaojiang06\/wiki-auto) was [deleted](https:\/\/github.com\/chaojiang06\/wiki-auto\/commit\/0af9b066f2b4e02726fb8a9be49283c0ad25367f) by the author. \r\n```\r\nhttps:\/\/github.com\/chaojiang06\/wiki-auto\/raw\/master\/wiki-manual\/train.tsv\r\n```\r\n\r\n- jeopardy \r\nThe downloading URL for jeopardy may move from \r\n```\r\nhttp:\/\/skeeto.s3.amazonaws.com\/share\/JEOPARDY_QUESTIONS1.json.gz\r\n```\r\n to \r\n```\r\nhttps:\/\/drive.google.com\/file\/d\/0BwT5wj_P7BKXb2hfM3d2RHU1ckE\/view?resourcekey=0-1abK4cJq-mqxFoSg86ieIg\r\n```\r\n\r\n- definite_pronoun_resolution\r\nThe following downloading URL for definite_pronoun_resolution cannot be reached for some reasons.\r\n```\r\nhttp:\/\/www.hlt.utdallas.edu\/~vince\/data\/emnlp12\/train.c.txt\r\n```\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\ndatasets.load_datasets('wiki_auto','manual')\r\ndatasets.load_datasets('jeopardy')\r\ndatasets.load_datasets('definite_pronoun_resolution')\r\n```\r\n\r\n## Expected results\r\nDownload successfully \r\n\r\n## Actual results\r\n- WikiAuto Manual \r\n```\r\nDownloading and preparing dataset wiki_auto\/manual (download: 151.65 MiB, generated: 155.97 MiB, post-processed: Unknown size, total: 307.61 MiB) to \/root\/.cache\/huggingface\/datasets\/wiki_auto\/manual\/1.0.0\/5ffdd9fc62422d29bd02675fb9606f77c1251ee17169ac10b143ce07ef2f4db8...\r\n 0%| | 0\/3 [00:00, ?it\/s]Traceback (most recent call last):\r\n File \"wiki_auto.py\", line 43, in \r\n main()\r\n File \"wiki_auto.py\", line 40, in main\r\n train, dev, test = dataset.generate_k_shot_data(k=16, seed=seed, path=\"..\/data\/\")\r\n File \"\/workspace\/projects\/CrossFit\/tasks\/fewshot_gym_dataset.py\", line 24, in generate_k_shot_data\r\n dataset = self.load_dataset()\r\n File \"wiki_auto.py\", line 34, in load_dataset\r\n return datasets.load_dataset('wiki_auto', 'manual')\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 675, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wiki_auto\/5ffdd9fc62422d29bd02675fb9606f77c1251ee17169ac10b143ce07ef2f4db8\/wiki_auto.py\", line 193, in _split_generators\r\n data_dir = dl_manager.download_and_extract(my_urls)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 284, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 
196, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 216, in map_nested\r\n mapped = [\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 217, in \r\n _single_map_nested((function, obj, types, None, True))\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 152, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 217, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 295, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 592, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/github.com\/chaojiang06\/wiki-auto\/raw\/master\/wiki-manual\/train.tsv\r\n```\r\n- jeopardy\r\n```\r\nUsing custom data configuration default\r\nDownloading and preparing dataset jeopardy\/default (download: 12.13 MiB, generated: 34.46 MiB, post-processed: Unknown size, total: 46.59 MiB) to \/root\/.cache\/huggingface\/datasets\/jeopardy\/default\/0.1.0\/25ee3e4a73755e637b8810f6493fd36e4523dea3ca8a540529d0a6e24c7f9810...\r\nTraceback (most recent call last):\r\n File \"jeopardy.py\", line 45, in \r\n main()\r\n File \"jeopardy.py\", line 42, in main\r\n train, dev, test = dataset.generate_k_shot_data(k=32, seed=seed, path=\"..\/data\/\")\r\n File \"\/workspace\/projects\/CrossFit\/tasks\/fewshot_gym_dataset.py\", line 79, in generate_k_shot_data\r\n dataset = self.load_dataset()\r\n File \"jeopardy.py\", line 36, in load_dataset\r\n return datasets.load_dataset(\"jeopardy\")\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 675, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/jeopardy\/25ee3e4a73755e637b8810f6493fd36e4523dea3ca8a540529d0a6e24c7f9810\/jeopardy.py\", line 72, in _split_generators\r\n filepath = dl_manager.download_and_extract(_DATA_URL)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 284, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 196, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 206, in map_nested\r\n return function(data_struct)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 217, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 295, in cached_path\r\n output_path = get_from_cache(\r\n 
File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 594, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach http:\/\/skeeto.s3.amazonaws.com\/share\/JEOPARDY_QUESTIONS1.json.gz\r\n```\r\n- definite_pronoun_resolution\r\n```\r\nDownloading and preparing dataset definite_pronoun_resolution\/plain_text (download: 222.12 KiB, generated: 239.12 KiB, post-processed: Unknown size, total: 461.24 KiB) to \/root\/.cache\/huggingface\/datasets\/definite_pronoun_resolution\/plain_text\/1.0.0\/35a1dfd4fba4afb8ba226cbbb65ac7cef0dd3cf9302d8f803740f05d2f16ceff...\r\n 0%| | 0\/2 [00:00, ?it\/s]Traceback (most recent call last):\r\n File \"definite_pronoun_resolution.py\", line 37, in \r\n main()\r\n File \"definite_pronoun_resolution.py\", line 34, in main\r\n train, dev, test = dataset.generate_k_shot_data(k=32, seed=seed, path=\"..\/data\/\")\r\n File \"\/workspace\/projects\/CrossFit\/tasks\/fewshot_gym_dataset.py\", line 79, in generate_k_shot_data\r\n dataset = self.load_dataset()\r\n File \"definite_pronoun_resolution.py\", line 28, in load_dataset\r\n return datasets.load_dataset('definite_pronoun_resolution')\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 675, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/definite_pronoun_resolution\/35a1dfd4fba4afb8ba226cbbb65ac7cef0dd3cf9302d8f803740f05d2f16ceff\/definite_pronoun_resolution.py\", line 76, in _split_generators\r\n files = dl_manager.download_and_extract(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 284, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 196, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 216, in map_nested\r\n mapped = [\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 217, in \r\n _single_map_nested((function, obj, types, None, True))\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 152, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 217, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 295, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 594, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach http:\/\/www.hlt.utdallas.edu\/~vince\/data\/emnlp12\/train.c.txt\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-4.15.0-161-generic-x86_64-with-glibc2.10\r\n- Python version: 
3.8.3\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3264\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3264\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3263","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3263\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3263\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3263\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3263","id":1052552516,"node_id":"I_kwDODunzps4-vK1E","number":3263,"title":"FET DATA","user":{"login":"FStell01","id":90987031,"node_id":"MDQ6VXNlcjkwOTg3MDMx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90987031?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/FStell01","html_url":"https:\/\/github.com\/FStell01","followers_url":"https:\/\/api.github.com\/users\/FStell01\/followers","following_url":"https:\/\/api.github.com\/users\/FStell01\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/FStell01\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/FStell01\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/FStell01\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/FStell01\/orgs","repos_url":"https:\/\/api.github.com\/users\/FStell01\/repos","events_url":"https:\/\/api.github.com\/users\/FStell01\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/FStell01\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636782366000,"updated_at":1636810307000,"closed_at":1636810307000,"author_association":"NONE","active_lock_reason":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found 
[here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3263\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3263\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3262","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3262\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3262\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3262\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3262","id":1052455082,"node_id":"PR_kwDODunzps4uej4t","number":3262,"title":"asserts replaced with exception for image classification task, csv, json","user":{"login":"manisnesan","id":153142,"node_id":"MDQ6VXNlcjE1MzE0Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/153142?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manisnesan","html_url":"https:\/\/github.com\/manisnesan","followers_url":"https:\/\/api.github.com\/users\/manisnesan\/followers","following_url":"https:\/\/api.github.com\/users\/manisnesan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manisnesan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manisnesan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manisnesan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manisnesan\/orgs","repos_url":"https:\/\/api.github.com\/users\/manisnesan\/repos","events_url":"https:\/\/api.github.com\/users\/manisnesan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manisnesan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636756499000,"updated_at":1636974517000,"closed_at":1636974517000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Fixes for csv, json in io module and image_classification task with tests referenced in https:\/\/github.com\/huggingface\/datasets\/issues\/3171","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3262\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3262\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3262","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3262","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3262.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3262.patch","merged_at":1636974517000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3261","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3261\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3261\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3261\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3261","id":1052346381,"node_id":"I_kwDODunzps4-uYgN","number":3261,"title":"Scifi_TV_Shows: Having trouble getting viewer to find appropriate files","user":{"login":"lara-martin","id":37913218,"node_id":"MDQ6VXNlcjM3OTEzMjE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37913218?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lara-martin","html_url":"https:\/\/github.com\/lara-martin","followers_url":"https:\/\/api.github.com\/users\/lara-martin\/followers","following_url":"https:\/\/api.github.com\/users\/lara-martin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lara-martin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lara-martin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lara-martin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lara-martin\/orgs","repos_url":"https:\/\/api.github.com\/users\/lara-martin\/repos","events_url":"https:\/\/api.github.com\/users\/lara-martin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lara-martin\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! I think this is because `iter_archive` doesn't support ZIP files yet. See https:\/\/github.com\/huggingface\/datasets\/issues\/3272\r\n\r\nYou can navigate into the archive this way instead:\r\n```python\r\n# in split_generators\r\ndata_dir = dl_manager.download_and_extract(url)\r\ntrain_filepath = os.path.join(data_dir, \"all-sci-fi-data-train.txt\")\r\nreturn [\r\n datasets.SplitGenerator(\r\n name=datasets.Split.TRAIN,\r\n gen_kwargs={\r\n \"filepath\": train_filepath,\r\n },\r\n ),\r\n...\r\n])\r\n\r\n# in generate_examples\r\nwith open(filepath, encoding=\"utf-8\") as f:\r\n ...\r\n```","It's working: https:\/\/huggingface.co\/datasets\/lara-martin\/Scifi_TV_Shows\/viewer\/Scifi_TV_Shows\/test\r\n\r\n
\r\n"],"created_at":1636745119000,"updated_at":1640082250000,"closed_at":1640082250000,"author_association":"NONE","active_lock_reason":null,"body":"## Dataset viewer issue for '*Science Fiction TV Show Plots Corpus (Scifi_TV_Shows)*'\r\n\r\n**Link:** [link](https:\/\/huggingface.co\/datasets\/lara-martin\/Scifi_TV_Shows)\r\n\r\nI tried adding both a script (https:\/\/huggingface.co\/datasets\/lara-martin\/Scifi_TV_Shows\/blob\/main\/Scifi_TV_Shows.py) and some dummy examples (https:\/\/huggingface.co\/datasets\/lara-martin\/Scifi_TV_Shows\/tree\/main\/dummy), but the viewer still has a 404 error (\"Not found. Maybe the cache is missing, or maybe the ressource does not exist.\"). I'm not sure what to try next. Thanks in advance!\r\n\r\nAm I the one who added this dataset? Yes\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3261\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3261\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3260","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3260\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3260\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3260\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3260","id":1052247373,"node_id":"PR_kwDODunzps4ueCIU","number":3260,"title":"Fix ConnectionError in Scielo dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["The CI error is unrelated to the change."],"created_at":1636740157000,"updated_at":1637086697000,"closed_at":1637085322000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"This PR:\r\n* allows 403 status code in HEAD requests to S3 buckets to fix the connection error in the Scielo dataset (instead of `url`, uses `response.url` to check the URL of the final endpoint)\r\n* makes the Scielo dataset streamable\r\n\r\nFixes #3255. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3260\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3260\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3260","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3260","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3260.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3260.patch","merged_at":1637085322000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3259","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3259\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3259\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3259\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3259","id":1052189775,"node_id":"PR_kwDODunzps4ud5W3","number":3259,"title":"Updating details of IRC disentanglement data","user":{"login":"jkkummerfeld","id":1298052,"node_id":"MDQ6VXNlcjEyOTgwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1298052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jkkummerfeld","html_url":"https:\/\/github.com\/jkkummerfeld","followers_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/followers","following_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/orgs","repos_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/repos","events_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Thank you for the cleanup!"],"created_at":1636737418000,"updated_at":1637255973000,"closed_at":1637255973000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"I was pleasantly surprised to find that someone had already added my dataset to the huggingface library, but some details were missing or incorrect. 
This PR fixes the documentation.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3259\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3259\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3259","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3259","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3259.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3259.patch","merged_at":1637255973000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3258","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3258\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3258\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3258\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3258","id":1052188195,"node_id":"I_kwDODunzps4-tx4j","number":3258,"title":"Reload dataset that was already downloaded with `load_from_disk` from cloud storage","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636737299000,"updated_at":1636737299000,"closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"body":"`load_from_disk` downloads the dataset to a temporary directory without checking if the dataset has already been downloaded once.\r\nIt would be nice to have some sort of caching for datasets downloaded this way. 
This could leverage the fingerprint of the dataset that was saved in the `state.json` file.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3258\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3258\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
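One possible shape for the requested caching, as a hedged sketch: every name below is hypothetical, and it assumes the `_fingerprint` key is present in the `state.json` written by `save_to_disk`.

```python
# Hypothetical wrapper: reuse a local copy keyed by the dataset fingerprint
# instead of re-downloading the remote dataset to a temporary directory.
import json
import os

import fsspec
from fsspec.core import get_fs_token_paths
from datasets import load_from_disk

def cached_load_from_disk(remote_path, cache_root="~/.cache/my_datasets"):
    # read the fingerprint from the remote state.json
    with fsspec.open(f"{remote_path}/state.json") as f:
        fingerprint = json.load(f)["_fingerprint"]
    local_dir = os.path.join(os.path.expanduser(cache_root), fingerprint)
    if not os.path.isdir(local_dir):
        # first time only: copy the saved dataset next to the fingerprint key
        fs, _, paths = get_fs_token_paths(remote_path)
        fs.get(paths[0], local_dir, recursive=True)
    return load_from_disk(local_dir)
```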
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3257","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3257\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3257\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3257\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3257","id":1052118365,"node_id":"I_kwDODunzps4-tg1d","number":3257,"title":"Use f-strings for string formatting ","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for 
newcomers"}],"state":"closed","locked":false,"assignee":{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false},"assignees":[{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi, I would be glad to help with this. Is there anyone else working on it?","Hi, I would be glad to work on this too.","#self-assign","Hi @Carlosbogo,\r\n\r\nwould you be interested in replacing the `.format` and `%` syntax with f-strings in the modules in the `datasets` directory since @Mehdi2402 has opened a PR that does that for all the other directories?","Oh I see. I will be glad to help with the `datasets` directory then."],"created_at":1636732935000,"updated_at":1637165918000,"closed_at":1637165918000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"f-strings offer better readability\/performance than `str.format` and `%`, so we should use them in all places in our codebase unless there is good reason to keep the older syntax.\r\n\r\n> **NOTE FOR CONTRIBUTORS**: To avoid large PRs and possible merge conflicts, do 1-3 modules per PR. 
Also, feel free to ignore the files located under `datasets\/*`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3257\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3257\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
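The requested conversion on a toy example:

```python
# Same output three ways; the issue asks for the f-string form.
name, version = "datasets", "1.15.1"

# older styles targeted by the issue
msg_percent = "Loading %s (version %s)" % (name, version)
msg_format = "Loading {} (version {})".format(name, version)

# f-string replacement: identical result, more readable
msg_fstring = f"Loading {name} (version {version})"

assert msg_percent == msg_format == msg_fstring
```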
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3256","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3256\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3256\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3256\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3256","id":1052000613,"node_id":"PR_kwDODunzps4udTqg","number":3256,"title":"asserts replaced by exception for text classification task with test.","user":{"login":"manisnesan","id":153142,"node_id":"MDQ6VXNlcjE1MzE0Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/153142?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manisnesan","html_url":"https:\/\/github.com\/manisnesan","followers_url":"https:\/\/api.github.com\/users\/manisnesan\/followers","following_url":"https:\/\/api.github.com\/users\/manisnesan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manisnesan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manisnesan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manisnesan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manisnesan\/orgs","repos_url":"https:\/\/api.github.com\/users\/manisnesan\/repos","events_url":"https:\/\/api.github.com\/users\/manisnesan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manisnesan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Haha it looks like you got the chance of being reviewed twice at the same time and got the same suggestion twice x)\r\nAnyway it's all good now so we can merge !","Thanks for the feedback. "],"created_at":1636725936000,"updated_at":1636729773000,"closed_at":1636729172000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"I have replaced only a single assert in text_classification.py along with a unit test to verify an exception is raised based on https:\/\/github.com\/huggingface\/datasets\/issues\/3171 . \r\n\r\nI would like to first understand the code contribution workflow. So keeping the change to a single file rather than making too many changes. Once this gets approved, I will look into the rest. \r\n\r\nThanks. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3256\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3256\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3256","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3256","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3256.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3256.patch","merged_at":1636729172000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3255","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3255\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3255\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3255\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3255","id":1051783129,"node_id":"I_kwDODunzps4-sO_Z","number":3255,"title":"SciELO dataset ConnectionError","user":{"login":"WojciechKusa","id":2575047,"node_id":"MDQ6VXNlcjI1NzUwNDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2575047?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/WojciechKusa","html_url":"https:\/\/github.com\/WojciechKusa","followers_url":"https:\/\/api.github.com\/users\/WojciechKusa\/followers","following_url":"https:\/\/api.github.com\/users\/WojciechKusa\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/WojciechKusa\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/WojciechKusa\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/WojciechKusa\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/WojciechKusa\/orgs","repos_url":"https:\/\/api.github.com\/users\/WojciechKusa\/repos","events_url":"https:\/\/api.github.com\/users\/WojciechKusa\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/WojciechKusa\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636711034000,"updated_at":1637085322000,"closed_at":1637085322000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nI get `ConnectionError` when I am trying to load the SciELO dataset. \r\n\r\n\r\nWhen I try the URL with `requests` I get:\r\n```\r\n>>> requests.head(\"https:\/\/ndownloader.figstatic.com\/files\/14019287\")\r\n\r\n```\r\nAnd as far as I understand redirections in `datasets` are not supported for downloads. 
\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/807341d0db0728073ab605c812c67f927d148f38\/datasets\/scielo\/scielo.py#L45 \r\n\r\n\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndataset = load_dataset(\"scielo\", \"en-es\")\r\n```\r\n\r\n## Expected results\r\nDownload SciELO dataset and load Dataset object\r\n\r\n\r\n## Actual results\r\n\r\n```\r\nDownloading and preparing dataset scielo\/en-es (download: 21.90 MiB, generated: 68.45 MiB, post-processed: Unknown size, total: 90.35 MiB) to \/Users\/test\/.cache\/huggingface\/datasets\/scielo\/en-es\/1.0.0\/7e05d55a20257efeb9925ff5de65bd4884fc6ddb6d765f1ea3e8860449d90e0e...\r\nTraceback (most recent call last):\r\n File \"scielo.py\", line 3, in \r\n dataset = load_dataset(\"scielo\", \"en-es\")\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 675, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/Users\/test\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/scielo\/7e05d55a20257efeb9925ff5de65bd4884fc6ddb6d765f1ea3e8860449d90e0e\/scielo.py\", line 77, in _split_generators\r\n data_dir = dl_manager.download_and_extract(_URLS[self.config.name])\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 284, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 196, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 206, in map_nested\r\n return function(data_struct)\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 217, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 295, in cached_path\r\n output_path = get_from_cache(\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 594, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/ndownloader.figstatic.com\/files\/14019287\r\n\r\n```\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.1\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.12\r\n- PyArrow version: 6.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3255\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3255\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3254","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3254\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3254\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3254\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3254","id":1051351172,"node_id":"PR_kwDODunzps4ubPwR","number":3254,"title":"Update xcopa dataset (fix checksum issues + add translated data)","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["The CI failures are unrelated to the changes (missing fields in the readme and the CER metric error fixed in #3252)."],"created_at":1636663893000,"updated_at":1636713058000,"closed_at":1636713057000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"This PR updates the checksums (as reported [here](https:\/\/discuss.huggingface.co\/t\/how-to-load-dataset-locally\/11601\/2)) of the `xcopa` dataset. Additionally, it adds new configs that hold the translated data of the original set of configs. 
This data was not available at the time of adding this dataset to the lib.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3254\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3254\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3254","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3254","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3254.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3254.patch","merged_at":1636713057000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3253","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3253\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3253\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3253\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3253","id":1051308972,"node_id":"I_kwDODunzps4-qbOs","number":3253,"title":"`GeneratorBasedBuilder` does not support `None` values","user":{"login":"pavel-lexyr","id":69010336,"node_id":"MDQ6VXNlcjY5MDEwMzM2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/69010336?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pavel-lexyr","html_url":"https:\/\/github.com\/pavel-lexyr","followers_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/followers","following_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/orgs","repos_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/repos","events_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi,\r\n\r\nthanks for reporting and providing a minimal reproducible example. 
\r\n\r\nThis line of the PR I've linked in our discussion on the Forum will add support for `None` values:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/a53de01842aac65c66a49b2439e18fa93ff73ceb\/src\/datasets\/features\/features.py#L835\r\n\r\nI expect that PR to be merged soon."],"created_at":1636660281000,"updated_at":1639060018000,"closed_at":1639060018000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\n`GeneratorBasedBuilder` does not support `None` values.\r\n\r\n## Steps to reproduce the bug\r\nSee [this repository](https:\/\/github.com\/pavel-lexyr\/huggingface-datasets-bug-reproduction) for minimal reproduction.\r\n\r\n## Expected results\r\nDataset is initialized with a `None` value in the `value` column.\r\n\r\n## Actual results\r\n```\r\nTraceback (most recent call last):\r\n File \"main.py\", line 3, in \r\n datasets.load_dataset(\".\/bad-data\")\r\n File \"...\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"...\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"...\/datasets\/builder.py\", line 697, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"...\/datasets\/builder.py\", line 1103, in _prepare_split\r\n example = self.info.features.encode_example(record)\r\n File \"...\/datasets\/features\/features.py\", line 1033, in encode_example\r\n return encode_nested_example(self, example)\r\n File \"...\/datasets\/features\/features.py\", line 808, in encode_nested_example\r\n return {\r\n File \"...\/datasets\/features\/features.py\", line 809, in \r\n k: encode_nested_example(sub_schema, sub_obj) for k, (sub_schema, sub_obj) in utils.zip_dict(schema, obj)\r\n File \"...\/datasets\/features\/features.py\", line 855, in encode_nested_example\r\n return schema.encode_example(obj)\r\n File \"...\/datasets\/features\/features.py\", line 299, in encode_example\r\n return float(value)\r\nTypeError: float() argument must be a string or a number, not 'NoneType'\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-5.4.0-81-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyArrow version: 6.0.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3253\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3253\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3252","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3252\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3252\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3252\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3252","id":1051124749,"node_id":"PR_kwDODunzps4uagoy","number":3252,"title":"Fix failing CER metric test in CI after update","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636646236000,"updated_at":1636726004000,"closed_at":1636726003000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Fixes the [failing CER metric test](https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/8644\/workflows\/79816553-fa2f-4756-b022-d5937f00bf7b\/jobs\/53298) in CI by adding support for `jiwer==2.3.0`, which was released yesterday. 
Also, I verified that all the tests in `metrics\/cer\/test_cer.py` pass after the change, so the results should be the same irrespective of the `jiwer` version.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3252\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3252\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3252","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3252","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3252.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3252.patch","merged_at":1636726003000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3250","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3250\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3250\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3250\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3250","id":1050541348,"node_id":"PR_kwDODunzps4uYmkr","number":3250,"title":"Add ETHICS dataset","user":{"login":"ssss1029","id":7088559,"node_id":"MDQ6VXNlcjcwODg1NTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7088559?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ssss1029","html_url":"https:\/\/github.com\/ssss1029","followers_url":"https:\/\/api.github.com\/users\/ssss1029\/followers","following_url":"https:\/\/api.github.com\/users\/ssss1029\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ssss1029\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ssss1029\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ssss1029\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ssss1029\/orgs","repos_url":"https:\/\/api.github.com\/users\/ssss1029\/repos","events_url":"https:\/\/api.github.com\/users\/ssss1029\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ssss1029\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636602334000,"updated_at":1637087545000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"This PR adds the ETHICS dataset, including all 5 sub-datasets.\r\nFrom https:\/\/arxiv.org\/abs\/2008.02275","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3250\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3250\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3250","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3250","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3250.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3250.patch","merged_at":null},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3249","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3249\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3249\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3249\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3249","id":1050193138,"node_id":"PR_kwDODunzps4uXeea","number":3249,"title":"Fix streaming for id_newspapers_2018","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636570530000,"updated_at":1636725692000,"closed_at":1636725691000,"author_association":"MEMBER","active_lock_reason":null,"body":"To be compatible with streaming, this dataset must use `dl_manager.iter_archive` since the data are in a .tgz file","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3249\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3249\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3249","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3249","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3249.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3249.patch","merged_at":1636725691000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3248","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3248\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3248\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3248\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3248","id":1050171082,"node_id":"PR_kwDODunzps4uXZzU","number":3248,"title":"Stream from Google Drive and other hosts","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["I just tried some datasets and noticed that `spider` is not working for some reason (the compression type is not recognized), resulting in FileNotFoundError. 
I can take a look tomorrow","I'm fixing the remaining files based on TAR archives","THANKS A LOT"],"created_at":1636569152000,"updated_at":1638288223000,"closed_at":1636737491000,"author_association":"MEMBER","active_lock_reason":null,"body":"Streaming from Google Drive is a bit more challenging than the other host we've been supporting:\r\n- the download URL must be updated to add the confirm token obtained by HEAD request\r\n- it requires to use cookies to keep the connection alive\r\n- the URL doesn't tell any information about whether the file is compressed or not\r\n\r\nTherefore I did two things:\r\n- I added a step for URL and headers\/cookies preparation in the StreamingDownloadManager\r\n- I added automatic compression type inference by reading the [magic number](https:\/\/en.wikipedia.org\/wiki\/List_of_file_signatures)\r\n\r\nThis allows to do do fancy things like\r\n```python\r\nfrom datasets.utils.streaming_download_manager import StreamingDownloadManager, xopen, xjoin, xglob\r\n\r\n# zip file containing a train.tsv file\r\nurl = \"https:\/\/drive.google.com\/uc?export=download&id=1k92sUfpHxKq8PXWRr7Y5aNHXwOCNUmqh\"\r\n\r\nextracted = StreamingDownloadManager().download_and_extract(url)\r\nfor inner_file in xglob(xjoin(extracted, \"*.tsv\")):\r\n with xopen(inner_file) as f:\r\n # streaming starts here\r\n for line in f:\r\n print(line)\r\n```\r\n\r\nThis should make around 80 datasets streamable. It concerns those hosted on Google Drive but also any dataset for which the URL doesn't give any information about compression. Here is the full list:\r\n\r\n```\r\namazon_polarity, ami, arabic_billion_words, ascent_kb, asset, big_patent, billsum, capes, cmrc2018, cnn_dailymail,\r\ncode_x_glue_cc_code_completion_token, code_x_glue_cc_code_refinement, code_x_glue_cc_code_to_code_trans,\r\ncode_x_glue_tt_text_to_text, conll2002, craigslist_bargains, dbpedia_14, docred, ehealth_kd, emo, euronews, germeval_14,\r\ngigaword, grail_qa, great_code, has_part, head_qa, health_fact, hope_edi, id_newspapers_2018,\r\nigbo_english_machine_translation, irc_disentangle, jfleg, jnlpba, journalists_questions, kor_ner, linnaeus, med_hop, mrqa,\r\nmt_eng_vietnamese, multi_news, norwegian_ner, offcombr, offenseval_dravidian, para_pat, peoples_daily_ner, pn_summary,\r\npoleval2019_mt, pubmed_qa, qangaroo, reddit_tifu, refresd, ro_sts_parallel, russian_super_glue, samsum, sberquad, scielo,\r\nsearch_qa, species_800, spider, squad_adversarial, tamilmixsentiment, tashkeela, ted_talks_iwslt, trec, turk, turkish_ner,\r\ntwi_text_c3, universal_morphologies, web_of_science, weibo_ner, wiki_bio, wiki_hop, wiki_lingua, wiki_summary, wili_2018,\r\nwisesight1000, wnut_17, yahoo_answers_topics, yelp_review_full, yoruba_text_c3\r\n```\r\n\r\nSome of them may not work if the host doesn't support HTTP range requests for example\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/2742\r\nFix 
https:\/\/github.com\/huggingface\/datasets\/issues\/3188","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3248\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":2,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3248\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3248","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3248","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3248.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3248.patch","merged_at":1636737490000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3247","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3247\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3247\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3247\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3247","id":1049699088,"node_id":"I_kwDODunzps4-kSMQ","number":3247,"title":"Loading big json dataset raises pyarrow.lib.ArrowNotImplementedError","user":{"login":"maxzirps","id":29249513,"node_id":"MDQ6VXNlcjI5MjQ5NTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29249513?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/maxzirps","html_url":"https:\/\/github.com\/maxzirps","followers_url":"https:\/\/api.github.com\/users\/maxzirps\/followers","following_url":"https:\/\/api.github.com\/users\/maxzirps\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/maxzirps\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/maxzirps\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/maxzirps\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/maxzirps\/orgs","repos_url":"https:\/\/api.github.com\/users\/maxzirps\/repos","events_url":"https:\/\/api.github.com\/users\/maxzirps\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/maxzirps\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi,\r\n\r\nthis issue is similar to https:\/\/github.com\/huggingface\/datasets\/issues\/3093, so you can either use the solution provided there or try to load the data in one chunk (you can control the chunk size by specifying the `chunksize` parameter (`int`) in `load_dataset`).\r\n\r\n@lhoestq Is this worth opening an issue on Jira? Basically, PyArrow doesn't allow casts that change the order of the struct fields because they treat `pa.struct` as an ordered sequence. 
Reordering fields manually in Python is probably too slow, so I think this needs to be fixed by them to be usable on our side.","I agree I would expect PyArrow to be able to handle this, do you want to open the issue @mariosasko ?\r\nAlthough maybe it's possible to fix struct casting on our side without hurting performance too much, if it's simply a matter of reordering the arrays in the StructArray"],"created_at":1636543079000,"updated_at":1636712753000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nWhen trying to create a dataset from a json file with around 25MB, the following error is raised `pyarrow.lib.ArrowNotImplementedError: Unsupported cast from struct to struct using function cast_struct`\r\n\r\nSplitting the big file into smaller ones and then loading it with the `load_dataset` method did also not work.\r\n\r\nCreating a pandas dataframe from it and then loading it with `Dataset.from_pandas` works\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nload_dataset(\"json\", data_files=\"test.json\")\r\n```\r\n\r\ntest.json ~25MB\r\n```json\r\n{\"a\": {\"c\": 8, \"b\": 5}}\r\n{\"a\": {\"b\": 7, \"c\": 6}}\r\n{\"a\": {\"c\": 8, \"b\": 5}}\r\n{\"a\": {\"b\": 7, \"c\": 6}}\r\n{\"a\": {\"c\": 8, \"b\": 5}}\r\n...\r\n```\r\n\r\nworking.json ~160bytes\r\n```json\r\n{\"a\": {\"c\": 8, \"b\": 5}}\r\n{\"a\": {\"b\": 7, \"c\": 6}}\r\n{\"a\": {\"c\": 8, \"b\": 5}}\r\n{\"a\": {\"b\": 7, \"c\": 6}}\r\n{\"a\": {\"c\": 8, \"b\": 5}}\r\n```\r\n\r\n## Expected results\r\nIt should load the dataset from the json file without error.\r\n\r\n## Actual results\r\nIt raises Exception `pyarrow.lib.ArrowNotImplementedError: Unsupported cast from struct to struct using function cast_struct`\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\/Users\/m\/workspace\/xxx\/project\/main.py\", line 60, in \r\n dataset = load_dataset(\"json\", data_files=\"result.json\")\r\n File \"\/opt\/homebrew\/Caskroom\/miniforge\/base\/envs\/xxx\/lib\/python3.9\/site-packages\/datasets\/load.py\", line 1627, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/opt\/homebrew\/Caskroom\/miniforge\/base\/envs\/xxx\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/opt\/homebrew\/Caskroom\/miniforge\/base\/envs\/xxx\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 697, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/opt\/homebrew\/Caskroom\/miniforge\/base\/envs\/xxx\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 1159, in _prepare_split\r\n writer.write_table(table)\r\n File \"\/opt\/homebrew\/Caskroom\/miniforge\/base\/envs\/xxx\/lib\/python3.9\/site-packages\/datasets\/arrow_writer.py\", line 428, in write_table\r\n pa_table = pa.Table.from_arrays([pa_table[name] for name in self._schema.names], schema=self._schema)\r\n File \"pyarrow\/table.pxi\", line 1685, in pyarrow.lib.Table.from_arrays\r\n File \"pyarrow\/table.pxi\", line 630, in pyarrow.lib._sanitize_arrays\r\n File \"pyarrow\/array.pxi\", line 338, in pyarrow.lib.asarray\r\n File \"pyarrow\/table.pxi\", line 304, in pyarrow.lib.ChunkedArray.cast\r\n File \"\/opt\/homebrew\/Caskroom\/miniforge\/base\/envs\/xxx\/lib\/python3.9\/site-packages\/pyarrow\/compute.py\", line 309, in cast\r\n return call_function(\"cast\", [arr], options)\r\n File \"pyarrow\/_compute.pyx\", line 528, in pyarrow._compute.call_function\r\n File 
\"pyarrow\/_compute.pyx\", line 327, in pyarrow._compute.Function.call\r\n File \"pyarrow\/error.pxi\", line 143, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 120, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowNotImplementedError: Unsupported cast from struct to struct using function cast_struct\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.14.0\r\n- Platform: macOS-12.0.1-arm64-arm-64bit\r\n- Python version: 3.9.7\r\n- PyArrow version: 6.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3247\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3247\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3246","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3246\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3246\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3246\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3246","id":1049662746,"node_id":"PR_kwDODunzps4uVvaW","number":3246,"title":"[tiny] fix typo in stream docs","user":{"login":"nollied","id":26421036,"node_id":"MDQ6VXNlcjI2NDIxMDM2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26421036?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nollied","html_url":"https:\/\/github.com\/nollied","followers_url":"https:\/\/api.github.com\/users\/nollied\/followers","following_url":"https:\/\/api.github.com\/users\/nollied\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nollied\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nollied\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nollied\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nollied\/orgs","repos_url":"https:\/\/api.github.com\/users\/nollied\/repos","events_url":"https:\/\/api.github.com\/users\/nollied\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nollied\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636540802000,"updated_at":1636542639000,"closed_at":1636542639000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3246\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3246\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3246","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3246","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3246.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3246.patch","merged_at":1636542639000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3245","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3245\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3245\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3245\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3245","id":1048726062,"node_id":"PR_kwDODunzps4uSqqq","number":3245,"title":"Fix load_from_disk temporary directory","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636470915000,"updated_at":1636471852000,"closed_at":1636471851000,"author_association":"MEMBER","active_lock_reason":null,"body":"`load_from_disk` uses `tempfile.TemporaryDirectory()` instead of our `get_temporary_cache_files_directory()` function. 
This can cause the temporary directory to be deleted before the dataset object is garbage collected.\r\n\r\nIn practice, it prevents anyone from using methods like `shuffle` on a dataset loaded this way, because it can't write the shuffled indices in a directory that doesn't exist anymore.\r\n\r\nIn this PR I switch to using `get_temporary_cache_files_directory()` and I update the tests.\r\n\r\ncc @mariosasko since you worked on `get_temporary_cache_files_directory()`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3245\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3245\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3245","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3245","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3245.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3245.patch","merged_at":1636471851000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3244","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3244\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3244\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3244\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3244","id":1048675741,"node_id":"PR_kwDODunzps4uSgG5","number":3244,"title":"Fix filter method for batched=True","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636468259000,"updated_at":1636473178000,"closed_at":1636473177000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3244\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3244\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3244","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3244","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3244.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3244.patch","merged_at":1636473177000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3243","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3243\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3243\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3243\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3243","id":1048630754,"node_id":"PR_kwDODunzps4uSWtB","number":3243,"title":"Remove redundant isort module placement","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636465830000,"updated_at":1636725765000,"closed_at":1636725765000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"`isort` can place modules by itself from [version 5.0.0](https:\/\/pycqa.github.io\/isort\/docs\/upgrade_guides\/5.0.0.html#module-placement-changes-known_third_party-known_first_party-default_section-etc) onwards, making the `known_first_party` and `known_third_party` fields in `setup.cfg` redundant (this is why our CI works, even though we haven't touched these options in a 
while).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3243\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3243\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3243","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3243","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3243.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3243.patch","merged_at":1636725765000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3242","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3242\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3242\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3242\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3242","id":1048527232,"node_id":"I_kwDODunzps4-f0GA","number":3242,"title":"Adding ANERcorp-CAMeLLab dataset","user":{"login":"vitalyshalumov","id":33824221,"node_id":"MDQ6VXNlcjMzODI0MjIx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33824221?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vitalyshalumov","html_url":"https:\/\/github.com\/vitalyshalumov","followers_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/followers","following_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/orgs","repos_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/repos","events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Adding ANERcorp dataset\r\n\r\n## Adding a Dataset\r\n- **Name:** *ANERcorp-CAMeLLab*\r\n- **Description:** *Since its creation in 2008, the ANERcorp dataset (Benajiba & Rosso, 2008) has been a standard reference used by Arabic named entity recognition researchers around the world. 
However, over time, this dataset was copied over from user to user, modified slightly here and there, and split into many different configurations that made it hard to compare fairly across papers and systems.\r\n\r\nIn 2020, a group of researchers from CAMeL Lab (Habash, Alhafni and Oudah) and Mind Lab (Antoun and Baly) met with the creator of the corpus, Yassine Benajiba, to consult with him and collectively agree on an exact split, and accepted minor corrections to the original dataset. Bashar Alhafni from CAMeL Lab, working with Nizar Habash, implemented the decisions provided in this release.*\r\n\r\n- **Paper:** *(a) Benajiba, Yassine, Paolo Rosso, and Jos\u00e9 Miguel Bened\u00ed Ruiz. \"Anersys: An Arabic named entity recognition system based on maximum entropy.\" In International Conference on Intelligent Text Processing and Computational Linguistics, pp. 143-153. Springer, Berlin, Heidelberg, 2007.\r\n\r\n(b) Ossama Obeid, Nasser Zalmout, Salam Khalifa, Dima Taji, Mai Oudah, Bashar Alhafni, Go Inoue, Fadhl Eryani, Alexander Erdmann, and Nizar Habash. \"CAMeL Tools: An Open Source Python Toolkit for Arabic Natural Language Processing.\" In Proceedings of the Conference on Language Resources and Evaluation (LREC 2020), Marseille, 2020.*\r\n- **Data:** *https:\/\/camel.abudhabi.nyu.edu\/anercorp\/*\r\n- **Motivation:** *This is the standard dataset for evaluating NER performance in Arabic.*\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md)."],"created_at":1636459444000,"updated_at":1636461675000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3242\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3242\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3241","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3241\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3241\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3241\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3241","id":1048461852,"node_id":"PR_kwDODunzps4uRzHa","number":3241,"title":"Swap descriptions of v1 and raw-v1 configs of WikiText dataset and fix metadata","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636455255000,"updated_at":1636465769000,"closed_at":1636465768000,"author_association":"MEMBER","active_lock_reason":null,"body":"Fix #3237.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3241\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3241\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3241","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3241","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3241.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3241.patch","merged_at":1636465768000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3240","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3240\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3240\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3240\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3240","id":1048376021,"node_id":"I_kwDODunzps4-fPLV","number":3240,"title":"Couldn't reach data file for disaster_response_messages","user":{"login":"pandya6988","id":81331791,"node_id":"MDQ6VXNlcjgxMzMxNzkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/81331791?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pandya6988","html_url":"https:\/\/github.com\/pandya6988","followers_url":"https:\/\/api.github.com\/users\/pandya6988\/followers","following_url":"https:\/\/api.github.com\/users\/pandya6988\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pandya6988\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pandya6988\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pandya6988\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pandya6988\/orgs","repos_url":"https:\/\/api.github.com\/users\/pandya6988\/repos","events_url":"https:\/\/api.github.com\/users\/pandya6988\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pandya6988\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["It looks like the dataset isn't available anymore on appen.com\r\n\r\nThe CSV files appear to still be available at https:\/\/www.kaggle.com\/landlord\/multilingual-disaster-response-messages though. 
It says that the data are under the CC0 license so I guess we can host the dataset elsewhere instead ?"],"created_at":1636450002000,"updated_at":1639492709000,"closed_at":1639492709000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nThe following command gives a ConnectionError.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndisaster = load_dataset('disaster_response_messages')\r\n```\r\n\r\n## Error\r\n```\r\nConnectionError: Couldn't reach https:\/\/datasets.appen.com\/appen_datasets\/disaster_response_data\/disaster_response_messages_training.csv\r\n```\r\n## Expected results\r\nIt should load the dataset without an error.\r\n\r\n## Actual results\r\nSee the ConnectionError above.\r\n\r\n## Environment info\r\n\r\n- `datasets` version:\r\n- Platform: Google Colab\r\n- Python version: 3.7\r\n- PyArrow version: \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3240\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3240\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3239","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3239\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3239\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3239\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3239","id":1048360232,"node_id":"I_kwDODunzps4-fLUo","number":3239,"title":"Inconsistent performance of the \"arabic_billion_words\" dataset","user":{"login":"vitalyshalumov","id":33824221,"node_id":"MDQ6VXNlcjMzODI0MjIx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33824221?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vitalyshalumov","html_url":"https:\/\/github.com\/vitalyshalumov","followers_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/followers","following_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/orgs","repos_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/repos","events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636449060000,"updated_at":1636449060000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nWhen downloaded from macine 1 the dataset is downloaded and parsed correctly.\r\nWhen downloaded from machine two (which has a different cache directory),\r\nthe following script:\r\n\r\nimport datasets\r\nfrom datasets import load_dataset\r\nraw_dataset_elkhair_1 = load_dataset('arabic_billion_words', 'Alittihad', split=\"train\",download_mode='force_redownload')\r\n\r\ngives the following error:\r\n\r\n**Downloading and preparing dataset arabic_billion_words\/Alittihad (download: 332.13 MiB, generated: 1.49 GiB, post-processed: Unknown size, total: 1.82 GiB) to 
\/root\/.cache\/huggingface\/datasets\/arabic_billion_words\/Alittihad\/1.1.0\/687a1f963284c8a766558661375ea8f7ab3fa3633f8cd9c9f42a53ebe83bfe17...\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 348M\/348M [00:24<00:00, 14.0MB\/s]\r\nTraceback (most recent call last):\r\n File \"...\/why_mismatch.py\", line 3, in <module>\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 709, in _download_and_prepare\r\n verify_splits(self.info.splits, split_dict)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/info_utils.py\", line 74, in verify_splits\r\n raise NonMatchingSplitsSizesError(str(bad_splits))\r\ndatasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=1601790302, num_examples=349342, dataset_name='arabic_billion_words'), 'recorded': SplitInfo(name='train', num_bytes=0, num_examples=0, dataset_name='arabic_billion_words')}]**\r\n\r\nNote that the package versions of datasets (1.15.1) and rarfile (4.0) are identical.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\nfrom datasets import load_dataset\r\nraw_dataset_elkhair_1 = load_dataset('arabic_billion_words', 'Alittihad', split=\"train\", download_mode='force_redownload')\r\n```\r\n\r\n## Expected results\r\nDownloading and preparing dataset arabic_billion_words\/Alittihad (download: 332.13 MiB, generated: 1.49 GiB, post-processed: Unknown size, total: 1.82 GiB) to ...\/.cache\/huggingface\/datasets\/arabic_billion_words\/Alittihad\/1.1.0\/687a1f963284c8a766558661375ea8f7ab3fa3633f8cd9c9f42a53ebe83bfe17...\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 348M\/348M [00:22<00:00, 15.8MB\/s]\r\nDataset arabic_billion_words downloaded and prepared to ...\/.cache\/huggingface\/datasets\/arabic_billion_words\/Alittihad\/1.1.0\/687a1f963284c8a766558661375ea8f7ab3fa3633f8cd9c9f42a53ebe83bfe17. 
Subsequent calls will reuse this data.\r\n\r\n## Actual results\r\nSee the NonMatchingSplitsSizesError traceback above.\r\n\r\n## Environment info\r\n\r\nMachine 1:\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-5.8.0-63-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyArrow version: 4.0.1\r\n\r\nMachine 2 (the buggy one):\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-4.4.0-210-generic-x86_64-with-glibc2.10\r\n- Python version: 3.8.8\r\n- PyArrow version: 6.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3239\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3239\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3238","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3238\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3238\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3238\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3238","id":1048226086,"node_id":"I_kwDODunzps4-eqkm","number":3238,"title":"Reuters21578 Couldn't reach ","user":{"login":"TingNLP","id":54096137,"node_id":"MDQ6VXNlcjU0MDk2MTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/54096137?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TingNLP","html_url":"https:\/\/github.com\/TingNLP","followers_url":"https:\/\/api.github.com\/users\/TingNLP\/followers","following_url":"https:\/\/api.github.com\/users\/TingNLP\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TingNLP\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TingNLP\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TingNLP\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TingNLP\/orgs","repos_url":"https:\/\/api.github.com\/users\/TingNLP\/repos","events_url":"https:\/\/api.github.com\/users\/TingNLP\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TingNLP\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! 
The URL works fine on my side today, could you try again ?","Thank you @lhoestq \r\nit works"],"created_at":1636438136000,"updated_at":1636588977000,"closed_at":1636588977000,"author_association":"NONE","active_lock_reason":null,"body":"## Adding a Dataset\r\n- **Name:** *Reuters21578*\r\n- **Description:** *ConnectionError: Couldn't reach https:\/\/kdd.ics.uci.edu\/databases\/reuters21578\/reuters21578.tar.gz*\r\n- **Data:** *https:\/\/huggingface.co\/datasets\/reuters21578*\r\n\r\n`from datasets import load_dataset`\r\n`dataset = load_dataset(\"reuters21578\", 'ModLewis')`\r\n\r\nConnectionError: Couldn't reach https:\/\/kdd.ics.uci.edu\/databases\/reuters21578\/reuters21578.tar.gz\r\n\r\nAnd when I try to request the link as follows:\r\n`import requests`\r\n`requests.head('https:\/\/kdd.ics.uci.edu\/databases\/reuters21578\/reuters21578.tar.gz')`\r\n\r\nSSLError: HTTPSConnectionPool(host='kdd.ics.uci.edu', port=443): Max retries exceeded with url: \/databases\/reuters21578\/reuters21578.tar.gz (Caused by SSLError(SSLError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:852)'),))\r\n\r\nThis problem is like #575.\r\nWhat should I do ?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3238\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3238\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3237","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3237\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3237\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3237\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3237","id":1048165525,"node_id":"I_kwDODunzps4-ebyV","number":3237,"title":"wikitext description wrong","user":{"login":"hongyuanmei","id":19693633,"node_id":"MDQ6VXNlcjE5NjkzNjMz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19693633?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hongyuanmei","html_url":"https:\/\/github.com\/hongyuanmei","followers_url":"https:\/\/api.github.com\/users\/hongyuanmei\/followers","following_url":"https:\/\/api.github.com\/users\/hongyuanmei\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hongyuanmei\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hongyuanmei\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hongyuanmei\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hongyuanmei\/orgs","repos_url":"https:\/\/api.github.com\/users\/hongyuanmei\/repos","events_url":"https:\/\/api.github.com\/users\/hongyuanmei\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hongyuanmei\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi @hongyuanmei, thanks for reporting.\r\n\r\nI'm fixing it."],"created_at":1636430812000,"updated_at":1636465768000,"closed_at":1636465768000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nDescriptions of the wikitext datasests are wrong. \r\n\r\n## Steps to reproduce the bug\r\nPlease see: https:\/\/github.com\/huggingface\/datasets\/blob\/f6dcafce996f39b6a4bbe3a9833287346f4a4b68\/datasets\/wikitext\/wikitext.py#L50\r\n\r\n## Expected results\r\nThe descriptions for raw-v1 and v1 should be switched. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3237\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3237\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3236","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3236\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3236\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3236\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3236","id":1048026358,"node_id":"I_kwDODunzps4-d5z2","number":3236,"title":"Loading of datasets changed in #3110 returns no examples ","user":{"login":"eladsegal","id":13485709,"node_id":"MDQ6VXNlcjEzNDg1NzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13485709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eladsegal","html_url":"https:\/\/github.com\/eladsegal","followers_url":"https:\/\/api.github.com\/users\/eladsegal\/followers","following_url":"https:\/\/api.github.com\/users\/eladsegal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eladsegal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eladsegal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eladsegal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eladsegal\/orgs","repos_url":"https:\/\/api.github.com\/users\/eladsegal\/repos","events_url":"https:\/\/api.github.com\/users\/eladsegal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eladsegal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi @eladsegal, thanks for reporting.\r\n\r\nI am sorry, but I can't reproduce the bug:\r\n```\r\nIn [1]: from datasets import load_dataset\r\n\r\nIn [2]: ds = load_dataset(\"qasper\")\r\nDownloading: 5.11kB [00:00, ?B\/s]\r\nDownloading and preparing dataset qasper\/qasper (download: 9.88 MiB, generated: 35.11 MiB, post-processed: Unknown size, total: 44.99 MiB) to .cache\\qasper\\qasper\\0.1.0\\b99154d2a15aa54bfc669f82b2eda715a2e342e81023d39613b0e2920fdb3ad8...\r\nDataset qasper downloaded and prepared to .cache\\qasper\\qasper\\0.1.0\\b99154d2a15aa54bfc669f82b2eda715a2e342e81023d39613b0e2920fdb3ad8. 
Subsequent calls will reuse this data.\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2\/2 [00:00, ?it\/s]\r\n\r\nIn [3]: ds\r\nOut[3]:\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['id', 'title', 'abstract', 'full_text', 'qas'],\r\n num_rows: 888\r\n })\r\n validation: Dataset({\r\n features: ['id', 'title', 'abstract', 'full_text', 'qas'],\r\n num_rows: 281\r\n })\r\n})\r\n```\r\n\r\nThis makes me suspect that the origin of the problem might be the cache: I didn't have this dataset in my cache, although I guess you already had it before the code change introduced by #3110.\r\n\r\n@lhoestq might it be possible that the code change introduced by #3110 makes \"inaccessible\" all previously cached TAR-based datasets?\r\n- Before, the caching system downloaded and extracted the tar dataset\r\n- Now it only downloads the tar dataset (no extraction is done)","I can't reproduce either in my environment (macOS, Python 3.7).\r\n\r\nIn your case it generates zero examples. This can only happen if the extraction of the TAR archive doesn't output the right filenames. Indeed if the `qasper` script can't find the right file to load, it's currently ignored and it returns zero examples. This case was not even considered when #3110 was developed since we considered the file names to be deterministic - and not depend on your environment.\r\n\r\nTherefore here is my hypothesis:\r\n- either the cache is corrupted somehow with an empty TAR archive\r\n- OR I suspect that the issue comes from Python 3.8\r\n","I just tried again on Python 3.8 and I was able to reproduce the issue. Let me work on a fix","Ok I found the issue. It's not related to Python 3.8 in itself though. 
This issue happens because your local installation of `datasets` is outdated compared to the changes to datasets in #3110.\r\n\r\nTo fix this you just have to pull the latest changes from `master` :)\r\n\r\nLet me know if that helps !\r\n\r\n--------------\r\n\r\nHere are more details about my investigation:\r\n\r\nIt's possible to reproduce this issue if you use `datasets<=1.15.1` or before b6469baa22c174b3906c631802a7016fedea6780 and if you load the dataset after revision b6469baa22c174b3906c631802a7016fedea6780. This is because `dl_manager.iter_archive` had issues at that time (and it was not used anywhere anyway).\r\n\r\nIn particular it was returning the absolute path to extracted files instead of the relative path of the file inside the archive. This was an issue because `dl_manager.iter_archive` isn't supposed to extract the TAR archive. Instead, it iterates over all the files inside the archive, without creating a directory with the extracted content.\r\n\r\nTherefore if you want to use the datasets on `master`, make sure that you have an up-to-date local installation of `datasets` as well, or you may face incompatibilities like this.","Thanks!\r\nBut what about code that is already using an older version of datasets? \r\nThe reason I encountered this issue was that suddenly one of my repos with version 1.12.1 started getting 0 examples.\r\nI handled it by adding `revision` to `load_dataset`, but I guess it would still be an issue for other users who don't know this.","Hi, in 1.12.1 it uses the dataset scripts from that time, not the ones on master.\r\n\r\nIt only uses the datasets from master if you installed `datasets` from source, or if the dataset isn't available in your local version (in this case it shows a warning and it loads from master).\r\n","OK, I understand the issue a bit better now.\r\nI see I wasn't on 1.12.1, but on 1.12.1.dev0, and since it is a dev version it uses master.\r\nSo users who use an old dev version must specify a revision or else they'll encounter this problem.\r\n\r\nBTW, when I opened the issue I installed the latest master version with\r\n```\r\npip install git+git:\/\/github.com\/huggingface\/datasets@master#egg=datasets\r\n```\r\nand also used `download_mode=\"force_redownload\"`, and it still returned 0 examples.\r\nNow I deleted all of the cache and ran the code again, and it worked.\r\nI'm not sure what exactly happened here, but it looks like it was due to a mix of an unofficial version and its cache.\r\n\r\nThanks again!"],"created_at":1636414186000,"updated_at":1636476365000,"closed_at":1636476347000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\nLoading of datasets changed in https:\/\/github.com\/huggingface\/datasets\/pull\/3110 returns no examples:\r\n```python\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['id', 'title', 'abstract', 'full_text', 'qas'],\r\n num_rows: 0\r\n })\r\n validation: Dataset({\r\n features: ['id', 'title', 'abstract', 'full_text', 'qas'],\r\n num_rows: 0\r\n })\r\n})\r\n```\r\n\r\n## Steps to reproduce the bug\r\nLoad any of the datasets that were changed in https:\/\/github.com\/huggingface\/datasets\/pull\/3110:\r\n```python\r\nfrom datasets import load_dataset\r\nload_dataset(\"qasper\")\r\n\r\n# The problem only started with the commit of #3110\r\nload_dataset(\"qasper\", revision=\"b6469baa22c174b3906c631802a7016fedea6780\")\r\n```\r\n\r\n## Expected results\r\n```python\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['id', 'title', 'abstract', 'full_text', 
'qas'],\r\n num_rows: 888\r\n })\r\n validation: Dataset({\r\n features: ['id', 'title', 'abstract', 'full_text', 'qas'],\r\n num_rows: 281\r\n })\r\n})\r\n```\r\nThis is what you get when specifying the revision of the commit before https:\/\/github.com\/huggingface\/datasets\/pull\/3110:\r\n```python\r\nfrom datasets import load_dataset\r\nload_dataset(\"qasper\", revision=\"acfe2abda1ca79f0ce5c1896aa83b4b78af76b7d\")\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.2.dev0 (master)\r\n- Python version: 3.8.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3236\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3236\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3235","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3235\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3235\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3235\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3235","id":1047808263,"node_id":"PR_kwDODunzps4uPr9Z","number":3235,"title":"Addd options to use updated bleurt checkpoints","user":{"login":"jaehlee","id":11873078,"node_id":"MDQ6VXNlcjExODczMDc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11873078?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaehlee","html_url":"https:\/\/github.com\/jaehlee","followers_url":"https:\/\/api.github.com\/users\/jaehlee\/followers","following_url":"https:\/\/api.github.com\/users\/jaehlee\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaehlee\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaehlee\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaehlee\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaehlee\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaehlee\/repos","events_url":"https:\/\/api.github.com\/users\/jaehlee\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaehlee\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636397634000,"updated_at":1636725928000,"closed_at":1636725928000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Adds options to use newer recommended checkpoint (as of 2021\/10\/8) bleurt-20 and its distilled versions. \r\n\r\nUpdated checkpoints are described in https:\/\/github.com\/google-research\/bleurt\/blob\/master\/checkpoints.md#the-recommended-checkpoint-bleurt-20\r\n\r\nThis change won't affect the default behavior of metrics\/bleurt. It only adds option to load newer checkpoints as\r\n\r\n`datasets.load_metric('bleurt', 'bleurt-20')`\r\n\r\n`bluert-20` generates scores roughly between 0 and 1, which wasn't the case for the previous checkpoints. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3235\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3235\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3235","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3235","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3235.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3235.patch","merged_at":1636725928000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3234","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3234\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3234\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3234\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3234","id":1047634236,"node_id":"PR_kwDODunzps4uPHRk","number":3234,"title":"Avoid PyArrow type optimization if it fails","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["That's good to have a way to disable this easily :)\r\nI just find it a bit unfortunate that users would have to experience the error once and then do `DISABLE_PYARROW_TYPES_OPTIMIZATION=1`. Do you know if there's a way to simply fallback on disabling it automatically when it fails ?","@lhoestq Actually, I agree a fallback makes more sense. The current approach is not very practical indeed and would require a mention in the docs.\r\n","Replaced the env variable with a fallback!","Hmm if the fallback automatically happens without the user knowing it, then I don't think we really need to mention it. But if you really wanted to, I think the [Improve performance](https:\/\/huggingface.co\/docs\/datasets\/cache.html#improve-performance) section would be a great place for it! ","Yea I think this could just end up in a note that says that `datasets` automatically picks the most optimized integer precision for your tokenized text data to save you disk space. 
Maybe later if we have a page on text processing we can add this note, but for now I agree it doesn't fit well into the doc.\r\n\r\nIn particular in the \"Improve performance\" section we mention what users can do to speed up their computations, while this behavior is just some internal feature that users don't have control over anyway."],"created_at":1636387827000,"updated_at":1636545869000,"closed_at":1636545868000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Adds a new variable, `DISABLE_PYARROW_TYPES_OPTIMIZATION`, to `config.py` for easier control of the Arrow type optimization.\r\n\r\nFix #2206 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3234\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3234\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3234","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3234","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3234.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3234.patch","merged_at":1636545868000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3233","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3233\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3233\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3233\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3233","id":1047474931,"node_id":"PR_kwDODunzps4uOl9-","number":3233,"title":"Improve repository structure docs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636379495000,"updated_at":1636452138000,"closed_at":1636452137000,"author_association":"MEMBER","active_lock_reason":null,"body":"Continuation of the documentation started in https:\/\/github.com\/huggingface\/datasets\/pull\/3221, taking into account @stevhliu 's comments","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3233\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3233\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3233","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3233","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3233.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3233.patch","merged_at":1636452137000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3232","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3232\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3232\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3232\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3232","id":1047361573,"node_id":"I_kwDODunzps4-bXgl","number":3232,"title":"The Xsum datasets seems not able to download.","user":{"login":"FYYFU","id":37999885,"node_id":"MDQ6VXNlcjM3OTk5ODg1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37999885?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/FYYFU","html_url":"https:\/\/github.com\/FYYFU","followers_url":"https:\/\/api.github.com\/users\/FYYFU\/followers","following_url":"https:\/\/api.github.com\/users\/FYYFU\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/FYYFU\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/FYYFU\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/FYYFU\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/FYYFU\/orgs","repos_url":"https:\/\/api.github.com\/users\/FYYFU\/repos","events_url":"https:\/\/api.github.com\/users\/FYYFU\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/FYYFU\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! On my side the URL is working fine, could you try again ?","> Hi ! On my side the URL is working fine, could you try again ?\r\n\r\nI try it again and cannot download the file (might because of my location). Could you please provide another download link(such as google drive)? :>","I don't know other download links - this is the one provided by the authors of the dataset. Maybe you can try downloading from another location ? There are several solutions: a VPN, a remote VM or Google Colab for example.","> I don't know other download links - this is the one provided by the authors of the dataset. Maybe you can try downloading from another location ? There are several solutions: a VPN, a remote VM or Google Colab for example.\r\n\r\n:> ok. 
Thanks for your reply."],"created_at":1636372734000,"updated_at":1636470436000,"closed_at":1636470436000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nThe download Link of the Xsum dataset provided in the repository is [Link](http:\/\/bollin.inf.ed.ac.uk\/public\/direct\/XSUM-EMNLP18-Summary-Data-Original.tar.gz). It seems not able to download.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nload_dataset('xsum')\r\n```\r\n\r\n\r\n## Actual results\r\n``` python\r\nraise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach http:\/\/bollin.inf.ed.ac.uk\/public\/direct\/XSUM-EMNLP18-Summary-Data-Original.tar.gz\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3232\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3232\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3231","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3231\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3231\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3231\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3231","id":1047170906,"node_id":"PR_kwDODunzps4uNmWT","number":3231,"title":"Group tests in multiprocessing workers by test file","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636361163000,"updated_at":1636377558000,"closed_at":1636361984000,"author_association":"MEMBER","active_lock_reason":null,"body":"By grouping tests by test file, we make sure that all the tests in `test_load.py` are sent to the same worker.\r\n\r\nTherefore, the fixture `hf_token` will be called only once (and from the same worker).\r\n\r\nRelated to: #3200.\r\nFix 
#3219.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3231\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3231\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3231","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3231","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3231.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3231.patch","merged_at":1636361983000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3230","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3230\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3230\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3230\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3230","id":1047135583,"node_id":"PR_kwDODunzps4uNfEd","number":3230,"title":"Add full tagset to conll2003 README","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["I also added the missing `pretty_name` tag in the dataset card to fix the CI"],"created_at":1636358764000,"updated_at":1636454918000,"closed_at":1636454458000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Even though it is possible to manually get the tagset list with\r\n\r\n```python\r\ndset.features[field_name].feature.names\r\n```\r\n\r\nI think it is useful to have an overview of the used tagset on the dataset card. This is particularly useful in light of the **dataset viewer**: the tags are encoded, so it is not immediately obvious what they are for a given sample. Adding a label-int mapping should make it easier for visitors to get a grasp of what they mean.\r\n\r\nFrom user-experience perspective, I would urge the full tagsets to always be available in the README's but I understand that that would take a lot of work, probably. 
Perhaps it can be automated?\r\n\r\ncloses #3189 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3230\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3230\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3230","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3230","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3230.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3230.patch","merged_at":1636454458000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3229","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3229\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3229\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3229\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3229","id":1046706425,"node_id":"PR_kwDODunzps4uMKsx","number":3229,"title":"Fix URL in CITATION file","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636279475000,"updated_at":1636279486000,"closed_at":1636279485000,"author_association":"MEMBER","active_lock_reason":null,"body":"Currently the BibTeX citation parsed from the CITATION file has wrong URL (it shows the repo URL instead of the proceedings paper URL):\r\n```\r\n@inproceedings{Lhoest_Datasets_A_Community_2021,\r\nauthor = {Lhoest, Quentin and Villanova del Moral, Albert and von Platen, Patrick and Wolf, Thomas and \u0160a\u0161ko, Mario and Jernite, Yacine and Thakur, Abhishek and Tunstall, Lewis and Patil, Suraj and Drame, Mariama and Chaumond, Julien and Plu, Julien and Davison, Joe and Brandeis, Simon and Sanh, Victor and Le Scao, Teven and Canwen Xu, Kevin and Patry, Nicolas and Liu, Steven and McMillan-Major, Angelina and Schmid, Philipp and Gugger, Sylvain and Raw, Nathan and Lesage, Sylvain and Lozhkov, Anton and Carrigan, Matthew and Matussi\u00e8re, Th\u00e9o and von Werra, Leandro and Debut, Lysandre and Bekman, Stas and Delangue, 
Cl\u00e9ment},\r\nbooktitle = {Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing: System Demonstrations},\r\nmonth = {11},\r\npages = {175--184},\r\npublisher = {Association for Computational Linguistics},\r\ntitle = {{Datasets: A Community Library for Natural Language Processing}},\r\nurl = {https:\/\/github.com\/huggingface\/datasets},\r\nyear = {2021}\r\n}\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3229\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3229\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3229","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3229","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3229.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3229.patch","merged_at":1636279485000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3228","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3228\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3228\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3228\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3228","id":1046702143,"node_id":"PR_kwDODunzps4uMJ58","number":3228,"title":"Add CITATION file","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636278019000,"updated_at":1636278707000,"closed_at":1636278706000,"author_association":"MEMBER","active_lock_reason":null,"body":"Add CITATION file.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3228\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3228\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3228","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3228","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3228.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3228.patch","merged_at":1636278706000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3227","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3227\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3227\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3227\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3227","id":1046667845,"node_id":"I_kwDODunzps4-YuJF","number":3227,"title":"Error in `Json(datasets.ArrowBasedBuilder)` class","user":{"login":"JunShern","id":7796965,"node_id":"MDQ6VXNlcjc3OTY5NjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7796965?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JunShern","html_url":"https:\/\/github.com\/JunShern","followers_url":"https:\/\/api.github.com\/users\/JunShern\/followers","following_url":"https:\/\/api.github.com\/users\/JunShern\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JunShern\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JunShern\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JunShern\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JunShern\/orgs","repos_url":"https:\/\/api.github.com\/users\/JunShern\/repos","events_url":"https:\/\/api.github.com\/users\/JunShern\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JunShern\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["I have additionally identified the source of the error, being that [this condition](https:\/\/github.com\/huggingface\/datasets\/blob\/fc46bba66ba4f432cc10501c16a677112e13984c\/src\/datasets\/packaged_modules\/json\/json.py#L124-L126) in the file\r\n`python3.8\/site-packages\/datasets\/packaged_modules\/json\/json.py` is not being entered correctly:\r\n```python\r\n if (\r\n isinstance(e, pa.ArrowInvalid)\r\n and \"straddling\" not in str(e)\r\n or block_size > len(batch)\r\n ):\r\n```\r\n\r\nFrom what I can tell, in my case the block_size simply needs to be increased, but the error message does not contain \"straddling\" so the condition does trigger correctly and we fail to reach [the line to increase 
block_size](https:\/\/github.com\/huggingface\/datasets\/blob\/fc46bba66ba4f432cc10501c16a677112e13984c\/src\/datasets\/packaged_modules\/json\/json.py#L135).\r\n\r\nChanging the condition above to simply\r\n```python\r\n if (\r\n block_size > len(batch)\r\n ):\r\n```\r\n\r\nFixes the error for me. I'm happy to create a PR containing this fix if the developers deem the other conditions unnecessary.","Hi ! I think the issue comes from the fact that your JSON file is not a valid JSON Lines file.\r\nEach example should be on one single line.\r\n\r\nCan you try fixing the format to have one line per example and try again ?",":open_mouth: you're right, that did it! I just put everything on a single line (my file only has a single example) and that fixed the error. Thank you so much!"],"created_at":1636264232000,"updated_at":1636484955000,"closed_at":1636484955000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nWhen a json file contains a `text` field that is larger than the block_size, the JSON dataset builder fails.\r\n\r\n## Steps to reproduce the bug\r\nCreate a folder that contains the following:\r\n```\r\n.\r\n\u251c\u2500\u2500 testdata\r\n\u2502\u00a0\u00a0 \u2514\u2500\u2500 mydata.json\r\n\u2514\u2500\u2500 test.py\r\n```\r\n\r\nPlease download [this file](https:\/\/github.com\/huggingface\/datasets\/files\/7491797\/mydata.txt) as `mydata.json`. (The error does not occur in JSON files with shorter text, but it is reproducible when the text is long as in the file I provide)\r\n:exclamation: :exclamation: GitHub doesn't allow me to upload JSON so this file is a TXT, and you should rename it to `.json`!\r\n\r\n`test.py` simply contains:\r\n```python\r\nfrom datasets import load_dataset\r\nmy_dataset = load_dataset(\"testdata\")\r\n```\r\n\r\nTo reproduce the error, simply run\r\n```\r\npython test.py\r\n```\r\n\r\n## Expected results\r\nThe data should load correctly without error.\r\n\r\n## Actual results\r\nThe dataset builder fails with:\r\n```\r\nUsing custom data configuration testdata-d490389b8ab4fd82\r\nDownloading and preparing dataset json\/testdata to \/home\/junshern.chan\/.cache\/huggingface\/datasets\/json\/testdata-d490389b8ab4fd82\/0.0.0\/3333a8af0db9764dfcff43a42ff26228f0f2e267f0d8a0a294452d188beadb34...\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 
2264.74it\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 447.01it\/s]\r\nFailed to read file '\/home\/junshern.chan\/hf-json-bug\/testdata\/mydata.json' with error : JSON parse error: Missing a name for object member. in row 0\r\nTraceback (most recent call last):\r\n File \"test.py\", line 28, in \r\n my_dataset = load_dataset(\"testdata\")\r\n File \"\/home\/junshern.chan\/.casio\/miniconda\/envs\/hf-json-bug\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/junshern.chan\/.casio\/miniconda\/envs\/hf-json-bug\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/junshern.chan\/.casio\/miniconda\/envs\/hf-json-bug\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 697, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/junshern.chan\/.casio\/miniconda\/envs\/hf-json-bug\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 1156, in _prepare_split\r\n for key, table in utils.tqdm(\r\n File \"\/home\/junshern.chan\/.casio\/miniconda\/envs\/hf-json-bug\/lib\/python3.8\/site-packages\/tqdm\/std.py\", line 1168, in __iter__\r\n for obj in iterable:\r\n File \"\/home\/junshern.chan\/.casio\/miniconda\/envs\/hf-json-bug\/lib\/python3.8\/site-packages\/datasets\/packaged_modules\/json\/json.py\", line 146, in _generate_tables\r\n raise ValueError(\r\nValueError: Not able to read records in the JSON file at \/home\/junshern.chan\/hf-json-bug\/testdata\/mydata.json. You should probably indicate the field of the JSON file containing your records. This JSON file contain the following fields: ['text']. Select the correct one and provide it as `field='XXX'` to the dataset loading method. \r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-5.8.0-63-generic-x86_64-with-glibc2.17\r\n- Python version: 3.8.12\r\n- PyArrow version: 6.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3227\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3227\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3226","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3226\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3226\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3226\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3226","id":1046584518,"node_id":"PR_kwDODunzps4uL0ma","number":3226,"title":"Fix paper BibTeX citation with proceedings reference","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636228379000,"updated_at":1636268728000,"closed_at":1636268727000,"author_association":"MEMBER","active_lock_reason":null,"body":"Fix paper BibTeX citation with proceedings reference.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3226\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3226\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3226","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3226","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3226.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3226.patch","merged_at":1636268727000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3225","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3225\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3225\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3225\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3225","id":1046530493,"node_id":"PR_kwDODunzps4uLrB3","number":3225,"title":"Update tatoeba to v2021-07-22","user":{"login":"KoichiYasuoka","id":15098598,"node_id":"MDQ6VXNlcjE1MDk4NTk4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15098598?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KoichiYasuoka","html_url":"https:\/\/github.com\/KoichiYasuoka","followers_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/followers","following_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/orgs","repos_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/repos","events_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["How about this? @lhoestq @abhishekkrthakur ","Hi ! I think it would be nice if people could still be able to load the old version.\r\nMaybe this can be a parameter ? For example to load the old version they could do\r\n```python\r\nload_dataset(\"tatoeba\", lang1=\"en\", lang2=\"mr\", date=\"v2020-11-09\")\r\n```\r\n\r\nIf it sounds good to you, we can add this parameter to the TatoebaConfig:\r\n```python\r\nclass TatoebaConfig(datasets.BuilderConfig):\r\n def __init__(self, *args, lang1=None, lang2=None, date=\"v2021-07-22\", **kwargs):\r\n self.date = date\r\n```\r\nand then pass the date to the URL\r\n```python\r\n_BASE_URL = \"https:\/\/object.pouta.csc.fi\/OPUS-Tatoeba\/{}\/moses\/{}-{}.txt.zip\"\r\n```\r\n```python\r\n def _base_url(lang1, lang2, date):\r\n return _BASE_URL.format(date, lang1, lang2)\r\n```\r\n\r\nWhat do you think ?","`_DATE = \"v\" + \"-\".join(s.zfill(2) for s in _VERSION.split(\".\"))` seems rather tricky but works well. How about this? 
@lhoestq \r\n","The CI is only failing because of the missing sections in the dataset card, and because of an issue with the CER metric that is unrelated to this PR"],"created_at":1636211671000,"updated_at":1636715593000,"closed_at":1636715593000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Tatoeba's latest version is v2021-07-22","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3225\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3225\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3225","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3225","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3225.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3225.patch","merged_at":1636715593000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3224","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3224\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3224\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3224\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3224","id":1046495831,"node_id":"PR_kwDODunzps4uLk2q","number":3224,"title":"User-pickling with dynamic sub-classing","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["@lhoestq Feel free to have a look. The implementation is slightly different from what you suggested. I have opted to overwrite `save` instead of meddling with `save_global`. `save_global` is called very late down in dill\/pickle so it is hard to control for what is happening there. I might be wrong. Pickling is more complex than I thought! 
\r\n\r\nThe linked issue (`map` with spaCy) also works now!\r\n\r\n```python\r\nimport pickle\r\nimport spacy\r\nfrom spacy import Language\r\nfrom datasets import load_dataset\r\nfrom datasets.utils.py_utils import dumps, pklregister\r\n\r\n@pklregister(Language, allow_subclasses=True)\r\ndef hash_spacy_language(pickler, nlp: Language):\r\n pickler.save(nlp.to_bytes())\r\n\r\ndef main():\r\n fin = r\"large\/file.txt\"\r\n nlp = spacy.load(\"en_core_web_sm\")\r\n\r\n def tokenize(l):\r\n return {\"tok\": [t.text for t in nlp(l[\"text\"])]}\r\n\r\n ds = load_dataset(\"text\", data_files=fin)\r\n ds = ds[\"train\"].map(tokenize)\r\n\r\n # Sanity check: load NLP from pickle created with our own `dumps`\r\n config = nlp.config\r\n lang_cls = spacy.util.get_lang_class(config[\"nlp\"][\"lang\"])\r\n nlp2 = lang_cls.from_config(config)\r\n nlp2.from_bytes(pickle.loads(dumps(nlp)))\r\n\r\n assert isinstance(nlp2, type(nlp))\r\n assert dumps(nlp) == dumps(nlp2)\r\n\r\nif __name__ == '__main__':\r\n main()\r\n```\r\n\r\nIf this all looks good to you, I'll start writing on some documentation and examples.\r\n","One more thing. This is a reduction function for SpaCy Language that should work with the new API:\r\n```python\r\n@pklregister(Language, allow_subclasses=True)\r\ndef hash_spacy_language(pickler, obj):\r\n def create_language(config, bytes_data):\r\n lang_cls = spacy.util.get_lang_class(config[\"nlp\"][\"lang\"])\r\n nlp = lang_cls.from_config(config)\r\n return nlp.from_bytes(bytes_data)\r\n\r\n args = (obj.config, obj.to_bytes())\r\n pickler.save_reduce(create_language, args, obj=obj)\r\n```\r\nso IMO we are missing a test with `pickler.save_reduce`. ","> One more thing. This is a reduction function for SpaCy Language that should work with the new API:\r\n> \r\n> ```python\r\n> @pklregister(Language, allow_subclasses=True)\r\n> def hash_spacy_language(pickler, obj):\r\n> def create_language(config, bytes_data):\r\n> lang_cls = spacy.util.get_lang_class(config[\"nlp\"][\"lang\"])\r\n> nlp = lang_cls.from_config(config)\r\n> return nlp.from_bytes(bytes_data)\r\n> \r\n> args = (obj.config, obj.to_bytes())\r\n> pickler.save_reduce(create_language, args, obj=obj)\r\n> ```\r\n> \r\n> so IMO we are missing a test with `pickler.save_reduce`.\r\n\r\nSure that seems a good idea, but I do not quite understand what `save_reduce` does. Could you give some more info about what reduce functions do and how they differ from regular `save` and `save_global`? I've read about it but the docs nor the built-in `pickle` code seem really helpful.","I'm no pickle expect, but here is my understanding. I believe the pickler uses the reduce function when you do `loads` to reconstructs the original object from the parameters\/arguments that were saved with `dumps`.\r\n\r\nFor example your sanity check could be simplified from\r\n```python\r\n config = nlp.config\r\n lang_cls = spacy.util.get_lang_class(config[\"nlp\"][\"lang\"])\r\n nlp2 = lang_cls.from_config(config)\r\n nlp2 = nlp2.from_bytes(pickle.loads(dumps(nlp)))\r\n```\r\nto\r\nEDIT: pickle.loads(pickle.dumps(nlp))<\/s>\r\n```python\r\n nlp2 = loads(dumps(nlp)) # using our custom pickler\r\n```\r\n\r\nThough note that while it can be convenient for tests, we actually don't care about the reconstruction of the object since we're only using the pickler for `dumps` to compute hashes.","> I'm no pickle expect, but here is my understanding. 
I believe the pickler uses the reduce function when you do `loads` to reconstructs the original object from the parameters\/arguments that were saved with `dumps`.\r\n> \r\n> For example your sanity check could be simplified from\r\n> \r\n> ```python\r\n> config = nlp.config\r\n> lang_cls = spacy.util.get_lang_class(config[\"nlp\"][\"lang\"])\r\n> nlp2 = lang_cls.from_config(config)\r\n> nlp2 = nlp2.from_bytes(pickle.loads(dumps(nlp)))\r\n> ```\r\n> \r\n> to\r\n> \r\n> ```python\r\n> nlp2 = pickle.loads(pickle.dumps(nlp))\r\n> ```\r\n> \r\n> Though note that while it can be convenient for tests, we actually don't care about the reconstruction of the object since we're only using the pickler for `dumps` to compute hashes.\r\n\r\nYes, the sanity check can be simplified like that _if_ we use `pickle.dumps` - but that would not test our own `dumps` functionality and would do a naive dump instead of using `to_bytes`. It won't work if we use our own `dumps`, exactly because of the reason that we want custom pickling and being able to call `to_bytes`. To reconstruct the object from the pickled bytes from `to_bytes` we need `from_bytes`. The result of pickle\/dill loads will therefore always be a `bytes` object and not a `Language` object.\r\n\r\nBut `save_reduce` is called when saving, right? Not when loading, AFAICT. I am just not sure what exactly it is saving. It is _potentially_ called [at the end of `save`](https:\/\/github.com\/python\/cpython\/blob\/24af9a40a8f85af813ea89998aa4e931fcc78cd9\/Lib\/pickle.py#L603) but only if we haven't returned by then. I just can't figure out what that base case is.","I don't think we expect users to write the reduce function that isn't going to be used anyway. So maybe let's stick with `save` ?","@BramVanroy \r\nAs I understand `save_reduce` is very similar to `copyreg.pickle`, so I'd suggest you to check the following links:\r\n* https:\/\/docs.python.org\/3\/library\/copyreg.html#copyreg.pickle\r\n* https:\/\/docs.python.org\/3\/library\/pickle.html#object.__reduce__\r\n\r\n\r\n@lhoestq \r\n> I don't think we expect users to write the reduce function that isn't going to be used anyway. So maybe let's stick with save ?\r\n\r\nI agree. \r\n\r\n`save_reduce` is very similar to `copyreg.pickle` and `object.__reduce__`, which are part of public API (and `save` isn't), so I expect more advanced users to know how to write their own reduction functions. But, as you say, `pklregister` should also work with `save` (even though I think `save` is a bit lower-level, and harder to understand than `save_reduce`).\r\n\r\nAll our examples in `py_utils` that use `pklregister` also use `save_reduce` in the last step, so my reduction for SpaCy is meant to be added there, and not to be written by users (because SpaCy is very popular, so the official support by us makes sense :)).\r\n\r\nAnd in the tests, let's ignore the reconstruction part of pickle\/dill, because it's not important for us, and focus on the generated dumps. What do you think?","@mariosasko What exactly do you mean with \"isn't part of the public API\"? It is [a public method](https:\/\/github.com\/python\/cpython\/blob\/24af9a40a8f85af813ea89998aa4e931fcc78cd9\/Lib\/pickle.py#L535) in base pickle, just like `dump` is but maybe you mean something else.","@BramVanroy Oh sorry, it's public (not prefixed with `\"_\"`) but it's not documented in the docs. 
`save_reduce` is also not in the docs, but its signature\/functionality is similar to `copyreg.pickle` and I see it more often being used in the projects on GH, so it's seems \"more public\" to me. ","Unfortunately I feel that pickle in general is under-documented. \ud83d\ude04 \r\n\r\nFor the documentation, I can add a brief example, maybe under \"How-to Guides\"? The only thing that isn't immediately obvious to me is how I can add that doc page to the TOC?","Yes great idea ! To add that doc page to the TOC, you just have to add it to the index.rst file in the \"How-to guides\" TOC section","@mariosasko @lhoestq Feel free to make any edits or suggestions in the text!","Hi @mariosasko. I wish you'd told me sooner, as I spent quite some time writing on this.\r\n\r\nI'm also not sure whether it is too advanced to have in the documentation. The spaCy use-case seems potentially frequent. Or do you wish to add that case to the defaults, and whenever new issues come up that seem like frequent\/obvious cases, add those internally as well?","Documenting the internal `pklregister` is overkill IMO (and it can be kept in docstrings), but we can document something higher level like `register_hash_func` once it's implemented.\r\n\r\nSo we keep the nice documentation you've written (thank you!), except we can rename it to \"Advanced caching\" and show an API that is similar to\r\n```python\r\n>>> @register_hash_func(Language, allow_subclasses=True)\r\n>>> def hash_spacy_language(nlp: Language):\r\n>>> return (nlp.to_bytes(),)\r\n```\r\nThis way we keep the documentation centered around the public API rather than the internals that may evolve\/be too complicated to fit only one section.\r\n\r\n> Or do you wish to add that case to the defaults, and whenever new issues come up that seem like frequent\/obvious cases, add those internally as well?\r\n\r\nLet's add it to the defaults since it's a frequent use-case. And also allow users to control the hashing using the API mentioned above if they face other non-trivially-hashable objects","Sure, I can have a go at implementing spaCy as a built-in. Should it be included in the tests? (Therefore adding spaCy to the tests requirements.)\r\n\r\nNext, from your example, it seems that the return value of `register_hash_func` will be used in pickler.save automatically (calling pklregister a bit deeper). Any reason why it returns a tuple? I can work on this as well, if needed.","> Sure, I can have a go at implementing spaCy as a built-in. Should it be included in the tests? (Therefore adding spaCy to the tests requirements.)\r\n\r\nThat would be perfect !\r\n\r\n> Next, from your example, it seems that the return value of register_hash_func will be used in pickler.save automatically (calling pklregister a bit deeper). \r\n\r\nYes I think so. For example register_hash_func can call pklregister with the user's function, but wrapped to use pickler.save.\r\n\r\n> Any reason why it returns a tuple? I can work on this as well, if needed.\r\n\r\nIt can either return an arbitrary object or a tuple. I like it a bit better if it's a tuple, so users understand more easily how to make the function take into account more than one item for the hash. It's also consistent with the streamlit caching functions, that also require a tuple. No strong opinion on this though\r\n\r\nLet me know if I can help with anything","@lhoestq I do not have the time anymore to work on this. Can someone else pick this up?","Hi ! 
Sure someone else can continue this PR (either someone from HF, or other contributors can fork the PR).\r\nI think I can work on this next week or the week after, but if anyone wants to work on this earlier feel free to comment here :)"],"created_at":1636200504000,"updated_at":1641388296000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"This is a continuation of the now closed PR in https:\/\/github.com\/huggingface\/datasets\/pull\/3206. The discussion there has shaped a new approach to do this.\r\n\r\nIn this PR, behavior of `pklregister` and `Pickler` is extended. Earlier, users were already able to register custom pickle functions. That is useful if they have objects that are not easily picklable with default methods. When one registers a custom function to a type, an object of that type will be pickled with the given function by `Pickler` which looks up the type in its `dispatch` table. The downside of this method, and of `pickle` in general, is that it is limited to direct type-matching and does not allow sub-classes. In many, default, cases that is not an issue. But when you are using external libraries where classes (e.g. parsers, models) are sub-classed this is not ideal. \r\n\r\n```python\r\nfrom datasets.fingerprint import Hasher\r\nfrom datasets.utils.py_utils import pklregister\r\n\r\nclass BaseParser:\r\n pass\r\n\r\nclass EnglishParser(BaseParser):\r\n pass\r\n\r\n@pklregister(BaseParser)\r\ndef custom_pkl_func(pickler, obj):\r\n print(f\"Called the custom pickle function for type {type(obj)}!\")\r\n # do something with the obj and ultimately save with the pickler\r\n\r\nbase = BaseParser()\r\nen = EnglishParser()\r\n\r\n# Hasher.hash uses the Pickler behind the scenes\r\n# `custom_pkl_func` called for base\r\nHasher.hash(base)\r\n# `custom_pkl_func` not called for en :-(\r\nHasher.hash(en)\r\n```\r\n\r\nIn the example above we'd want to sub-class `EnglishParser` to be handled in the same way as its super-class `BaseParser`. This PR solves that by allowing for a keyword-argument `allow_subclasses` in `pklregister` (default: `False`). \r\n\r\n```python\r\n@pklregister(BaseParser, allow_subclasses=True)\r\n```\r\n\r\nWhen this option is enabled, we not only save the function in `Pickler.dispatch` but also save it in a custom table `Pickler.subclass_dispatch` **which allows us to dynamically add sub-classes of that class to the real dispatch table**. Then, if we want to pickle an object `obj` with `Pickler.dump()` (which ultimately will call `Pickler.save()`) we _first_ check whether any of the object's super-classes exist in `Pickler.sublcass_dispatch` and get the related custom pickle function. If we find one, we add the type of `obj` alongside the function to `Pickler.dispatch`. All of this happens at the start of the call to `Pickler.save()`. _Only then_ dill.Pickler's `save` will be called, which in turn will call `pickle._Pickler.save` which handles everything. 
Here, the `Pickler.dispatch` table will be used to look up custom pickler functions - and it now also includes the function for `obj`, which was copied from its super-class, which we added at the very start of our custom `Pickler.save()`.\r\n\r\nFor edge cases and, especially, for testing, a contextmanager class `TempPickleRegistry` is included that resets the pickle registry on exit to its previous state.\r\n\r\n```python\r\nwith TempPickleRegistry():\r\n @pklregister(MyObjClass)\r\n def pickle_registry_test_false(pickler, obj):\r\n pickler.save(obj.fancy_method())\r\n\r\n some_obj = MyObjClass()\r\n dumps(some_obj)\r\n # `MyObjClass` is in Pickler.dispatch\r\n\r\n# ... `MyObjClass` is _not_ in Pickler.dispatch anymore\r\n```\r\n\r\ncloses https:\/\/github.com\/huggingface\/datasets\/issues\/3178\r\n\r\nTo Do\r\n====\r\n- [x] Write tests\r\n- [ ] Write documentation\/examples?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3224\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3224\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3224","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3224","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3224.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3224.patch","merged_at":null},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3223","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3223\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3223\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3223\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3223","id":1046445507,"node_id":"PR_kwDODunzps4uLb1E","number":3223,"title":"Update BibTeX entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636180912000,"updated_at":1636182398000,"closed_at":1636182398000,"author_association":"MEMBER","active_lock_reason":null,"body":"Update BibTeX entry.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3223\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3223\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3223","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3223","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3223.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3223.patch","merged_at":1636182398000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3222","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3222\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3222\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3222\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3222","id":1046299725,"node_id":"PR_kwDODunzps4uK_uG","number":3222,"title":"Add docs for audio processing","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Nice ! love it this way. I guess you can set this PR to \"ready for review\" ?","I guess we can merge this one now :)"],"created_at":1636153679000,"updated_at":1637771528000,"closed_at":1637768152000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"This PR adds documentation for the `Audio` feature. 
It describes:\r\n\r\n- The difference between loading `path` and `audio`, as well as use-cases\/best practices for each of them.\r\n- Resampling audio files with `cast_column`, and then calling `ds[0][\"audio\"]` to automatically decode and resample to the desired sampling rate.\r\n- Resampling with `map`.\r\n\r\nPreview [here](https:\/\/52969-250213286-gh.circle-artifacts.com\/0\/docs\/_build\/html\/audio_process.html), let me know if I'm missing anything!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3222\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3222\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3222","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3222","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3222.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3222.patch","merged_at":1637768152000},"is_pull_request":true}
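The record above describes casting an audio column and then indexing into the dataset to decode. A minimal sketch of that flow, assuming a hypothetical repository `user/my_audio_dataset` with an `audio` column and an installed audio decoding backend:

```python
from datasets import Audio, load_dataset

ds = load_dataset("user/my_audio_dataset", split="train")  # placeholder repo

# After casting, accessing an example decodes the file and resamples it
# to the requested rate on the fly.
ds = ds.cast_column("audio", Audio(sampling_rate=16_000))

sample = ds[0]["audio"]  # dict with "path", "array" and "sampling_rate" keys
print(sample["sampling_rate"])  # 16000
```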
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3221","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3221\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3221\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3221\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3221","id":1045890512,"node_id":"PR_kwDODunzps4uJp4Z","number":3221,"title":"Resolve data_files by split name","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Really cool!\r\nWhen splitting by folder, what do we use for validation set (\"valid\", \"validation\" or both)?","> When splitting by folder, what do we use for validation set (\"valid\", \"validation\" or both)?\r\n\r\nBoth are fine :) As soon as it has \"valid\" in it","Merging for now, if you have comments about the documentation we can address them in subsequent PRs :)","Thanks for the comments @stevhliu :) I just opened https:\/\/github.com\/huggingface\/datasets\/pull\/3233 to take them into account"],"created_at":1636121255000,"updated_at":1636379540000,"closed_at":1636134598000,"author_association":"MEMBER","active_lock_reason":null,"body":"As discussed in https:\/\/github.com\/huggingface\/datasets\/issues\/3027 we should automatically infer what file is supposed to go to what split automatically, based on filenames.\r\n\r\nI added the support for different kinds of patterns, for both dataset repositories and local directories:\r\n\r\n```\r\n Input structure:\r\n\r\n my_dataset_repository\/\r\n \u251c\u2500\u2500 README.md\r\n \u2514\u2500\u2500 
dataset.csv\r\n\r\n Output patterns:\r\n\r\n {\"train\": [\"*\"]}\r\n```\r\n```\r\n Input structure:\r\n\r\n my_dataset_repository\/\r\n \u251c\u2500\u2500 README.md\r\n \u251c\u2500\u2500 train.csv\r\n \u2514\u2500\u2500 test.csv\r\n\r\n my_dataset_repository\/\r\n \u251c\u2500\u2500 README.md\r\n \u2514\u2500\u2500 data\/\r\n \u251c\u2500\u2500 train.csv\r\n \u2514\u2500\u2500 test.csv\r\n\r\n my_dataset_repository\/\r\n \u251c\u2500\u2500 README.md\r\n \u251c\u2500\u2500 train_0.csv\r\n \u251c\u2500\u2500 train_1.csv\r\n \u251c\u2500\u2500 train_2.csv\r\n \u251c\u2500\u2500 train_3.csv\r\n \u251c\u2500\u2500 test_0.csv\r\n \u2514\u2500\u2500 test_1.csv\r\n\r\n Output patterns:\r\n\r\n {\"train\": [\"*train*\"], \"test\": [\"*test*\"]}\r\n```\r\n```\r\n Input structure:\r\n\r\n my_dataset_repository\/\r\n \u251c\u2500\u2500 README.md\r\n \u2514\u2500\u2500 data\/\r\n \u251c\u2500\u2500 train\/\r\n \u2502 \u251c\u2500\u2500 shard_0.csv\r\n \u2502 \u251c\u2500\u2500 shard_1.csv\r\n \u2502 \u251c\u2500\u2500 shard_2.csv\r\n \u2502 \u2514\u2500\u2500 shard_3.csv\r\n \u2514\u2500\u2500 test\/\r\n \u251c\u2500\u2500 shard_0.csv\r\n \u2514\u2500\u2500 shard_1.csv\r\n\r\n Output patterns:\r\n\r\n {\"train\": [\"*train*\/*\", \"*train*\/**\/*\"], \"test\": [\"*test*\/*\", \"*test*\/**\/*\"]}\r\n```\r\n\r\nand also this pattern that allows to have custom split names, and that is the structure used by #3098 for `push_to_hub` (cc @LysandreJik ):\r\n\r\n```\r\n Input structure:\r\n\r\n my_dataset_repository\/\r\n \u251c\u2500\u2500 README.md\r\n \u2514\u2500\u2500 data\/\r\n \u251c\u2500\u2500 train-00000-of-00003.csv\r\n \u251c\u2500\u2500 train-00001-of-00003.csv\r\n \u251c\u2500\u2500 train-00002-of-00003.csv\r\n \u251c\u2500\u2500 test-00000-of-00001.csv\r\n \u251c\u2500\u2500 random-00000-of-00003.csv\r\n \u251c\u2500\u2500 random-00001-of-00003.csv\r\n \u2514\u2500\u2500 random-00002-of-00003.csv\r\n\r\n Output patterns:\r\n\r\n {\r\n \"train\": [\"data\/train-[0-9][0-9][0-9][0-9][0-9]-of-[0-9][0-9][0-9][0-9][0-9].*\"],\r\n \"test\": [\"data\/test-[0-9][0-9][0-9][0-9][0-9]-of-[0-9][0-9][0-9][0-9][0-9].*\"],\r\n \"random\": [\"data\/random-[0-9][0-9][0-9][0-9][0-9]-of-[0-9][0-9][0-9][0-9][0-9].*\"],\r\n }\r\n```\r\n\r\nYou can check the documentation about structuring your repository [here](https:\/\/52640-250213286-gh.circle-artifacts.com\/0\/docs\/_build\/html\/repository_structure.html). cc @stevhliu \r\n\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/3027\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/3212\r\n\r\nIn the future we can also add support for dataset configurations.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3221\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3221\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3221","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3221","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3221.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3221.patch","merged_at":1636134597000},"is_pull_request":true}
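With the pattern resolution described in the PR body above, loading such a repository should need no explicit `data_files` mapping. A hedged sketch of the expected usage, reusing the placeholder repository name from the examples:

```python
from datasets import load_dataset

# Files matching *train* / *test* are routed to the corresponding splits,
# per the output patterns listed in the PR description.
dsets = load_dataset("my_dataset_repository")
print(dsets["train"])
print(dsets["test"])
```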
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3220","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3220\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3220\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3220\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3220","id":1045549029,"node_id":"I_kwDODunzps4-Uc_l","number":3220,"title":"Add documentation about dataset viewer feature","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636099879000,"updated_at":1636099879000,"closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"body":"Add to the docs more details about the dataset viewer feature in the Hub.\r\n\r\nCC: @julien-c \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3220\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3220\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3219","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3219\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3219\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3219\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3219","id":1045095000,"node_id":"I_kwDODunzps4-SuJY","number":3219,"title":"Eventual Invalid Token Error at setup of private datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636051845000,"updated_at":1636377786000,"closed_at":1636361983000,"author_association":"MEMBER","active_lock_reason":null,"body":"## Describe the bug\r\nFrom time to time, there appear Invalid Token errors with private datasets:\r\n\r\n- https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/8520\/workflows\/d44629f2-4749-40f8-a657-50931d0b3434\/jobs\/52534\r\n ```\r\n ____________ ERROR at setup of test_load_streaming_private_dataset _____________\r\n ValueError: Invalid token passed!\r\n\r\n ____ ERROR at setup of test_load_streaming_private_dataset_with_zipped_data ____\r\n ValueError: Invalid token passed!\r\n \r\n =========================== short test summary info ============================\r\n ERROR tests\/test_load.py::test_load_streaming_private_dataset - ValueError: I...\r\n ERROR 
tests\/test_load.py::test_load_streaming_private_dataset_with_zipped_data\r\n ```\r\n\r\n- https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/8557\/workflows\/a8383181-ba6d-4487-9d0a-f750b6dcb936\/jobs\/52763\r\n ```\r\n ____ ERROR at setup of test_load_streaming_private_dataset_with_zipped_data ____\r\n [gw1] linux -- Python 3.6.15 \/home\/circleci\/.pyenv\/versions\/3.6.15\/bin\/python3.6\r\n\r\n hf_api = \r\n hf_token = 'vgNbyuaLNEBuGbgCEtSBCOcPjZnngJufHkTaZvHwkXKGkHpjBPwmLQuJVXRxBuaRzNlGjlMpYRPbthfHPFWXaaEDTLiqTTecYENxukRYVAAdpeApIUPxcgsowadkTkPj'\r\n zip_csv_path = PosixPath('\/tmp\/pytest-of-circleci\/pytest-0\/popen-gw1\/data16\/dataset.csv.zip')\r\n\r\n @pytest.fixture(scope=\"session\")\r\n def hf_private_dataset_repo_zipped_txt_data_(hf_api: HfApi, hf_token, zip_csv_path):\r\n repo_name = \"repo_zipped_txt_data-{}\".format(int(time.time() * 10e3))\r\n hf_api.create_repo(token=hf_token, name=repo_name, repo_type=\"dataset\", private=True)\r\n repo_id = f\"{USER}\/{repo_name}\"\r\n hf_api.upload_file(\r\n token=hf_token,\r\n path_or_fileobj=str(zip_csv_path),\r\n path_in_repo=\"data.zip\",\r\n repo_id=repo_id,\r\n > repo_type=\"dataset\",\r\n )\r\n\r\n tests\/hub_fixtures.py:68:\r\n\r\n ...\r\n\r\n ValueError: Invalid token passed!\r\n =========================== short test summary info ============================\r\n ERROR tests\/test_load.py::test_load_streaming_private_dataset_with_zipped_data\r\n ```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3219\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3219\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
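The issue above reports intermittent `ValueError: Invalid token passed!` during fixture setup and does not show a fix. Purely as an illustration, one way such a fixture could guard the flaky call is a retry wrapper; the `create_repo` signature mirrors the traceback above, while the retry logic itself is an assumption, not the actual resolution:

```python
import time

def create_repo_with_retry(hf_api, hf_token, repo_name, retries=3, wait=5.0):
    """Retry the occasionally flaky private-repo creation before giving up."""
    for attempt in range(retries):
        try:
            return hf_api.create_repo(
                token=hf_token, name=repo_name, repo_type="dataset", private=True
            )
        except ValueError:
            if attempt == retries - 1:
                raise
            time.sleep(wait)
```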
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3218","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3218\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3218\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3218\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3218","id":1045032313,"node_id":"PR_kwDODunzps4uG2UA","number":3218,"title":"Fix code quality in riddle_sense dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636047800000,"updated_at":1636048203000,"closed_at":1636048202000,"author_association":"MEMBER","active_lock_reason":null,"body":"Fix trailing whitespace.\r\n\r\nFix #3217.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3218\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3218\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3218","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3218","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3218.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3218.patch","merged_at":1636048202000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3217","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3217\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3217\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3217\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3217","id":1045029710,"node_id":"I_kwDODunzps4-SeNO","number":3217,"title":"Fix code quality bug in riddle_sense dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["To give more context: https:\/\/github.com\/psf\/black\/issues\/318. `black` doesn't treat this as a bug, but `flake8` does. \r\n"],"created_at":1636047632000,"updated_at":1636048202000,"closed_at":1636048202000,"author_association":"MEMBER","active_lock_reason":null,"body":"## Describe the bug\r\n```\r\ndatasets\/riddle_sense\/riddle_sense.py:36:21: W291 trailing whitespace\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3217\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3217\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3216","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3216\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3216\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3216\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3216","id":1045027733,"node_id":"PR_kwDODunzps4uG1YS","number":3216,"title":"Pin version exclusion for tensorflow incompatible with keras","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636047486000,"updated_at":1636109858000,"closed_at":1636109857000,"author_association":"MEMBER","active_lock_reason":null,"body":"Once `tensorflow` version 2.6.2 is released:\r\n- https:\/\/github.com\/tensorflow\/tensorflow\/commit\/c1867f3bfdd1042f694df7a9870be51ba80543cb\r\n- https:\/\/pypi.org\/project\/tensorflow\/2.6.2\/\r\n\r\nwith the patch:\r\n- tensorflow\/tensorflow#52927\r\n\r\nwe can remove the temporary fix we introduced in:\r\n- #3208\r\n\r\nFix 
#3209.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3216\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3216\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3216","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3216","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3216.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3216.patch","merged_at":1636109857000},"is_pull_request":true}
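For context, a version-exclusion pin of the kind the PR title refers to looks like this in a `setup.py` requirements list. The exact specifier is not quoted in the record, so the versions below are illustrative assumptions:

```python
# Illustrative only: accept tensorflow 2.x but skip the releases whose
# keras dependency resolution is broken, until the patched release lands.
TESTS_REQUIRE = [
    "tensorflow>=2.2.0,!=2.6.0,!=2.6.1",
]
```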
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3215","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3215\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3215\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3215\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3215","id":1045011207,"node_id":"PR_kwDODunzps4uGx4o","number":3215,"title":"Small updates to to_tf_dataset documentation","user":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["@stevhliu Accepted both suggestions, thanks for the review!"],"created_at":1636046521000,"updated_at":1636052138000,"closed_at":1636052137000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"I added a little more description about `to_tf_dataset` compared to just setting the format","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3215\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3215\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3215","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3215","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3215.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3215.patch","merged_at":1636052137000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3214","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3214\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3214\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3214\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3214","id":1044924050,"node_id":"I_kwDODunzps4-SEaS","number":3214,"title":"Add ACAV100M Dataset","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636041598000,"updated_at":1638964830000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Adding a Dataset\r\n- **Name:** *ACAV100M*\r\n- **Description:** *contains 100 million videos with high audio-visual correspondence, ideal for self-supervised video representation learning.*\r\n- **Paper:** *https:\/\/arxiv.org\/abs\/2101.10803*\r\n- **Data:** *https:\/\/github.com\/sangho-vision\/acav100m*\r\n- **Motivation:** *The largest dataset (to date) for audio-visual learning.*\r\n\r\nInstructions to add a new dataset can be found 
[here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3214\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3214\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
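The linked ADD_NEW_DATASET guide walks contributors through writing a loading script. A bare-bones skeleton of such a script is sketched below; the class name and feature fields are assumptions for illustration, not taken from a real ACAV100M script:

```python
import datasets

class Acav100m(datasets.GeneratorBasedBuilder):
    """Skeleton loading script in the style the guide describes."""

    def _info(self):
        return datasets.DatasetInfo(
            description="Placeholder description.",
            features=datasets.Features({"video_id": datasets.Value("string")}),
        )

    def _split_generators(self, dl_manager):
        # Real scripts download or locate files here and pass them via gen_kwargs.
        return [datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={})]

    def _generate_examples(self):
        # Real scripts iterate over the downloaded files and yield (key, example).
        yield 0, {"video_id": "placeholder"}
```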
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3213","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3213\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3213\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3213\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3213","id":1044745313,"node_id":"PR_kwDODunzps4uF6W9","number":3213,"title":"Fix tuple_ie download url","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636031347000,"updated_at":1636121766000,"closed_at":1636121765000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Fix #3204 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3213\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3213\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3213","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3213","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3213.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3213.patch","merged_at":1636121765000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3212","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3212\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3212\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3212\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3212","id":1044640967,"node_id":"I_kwDODunzps4-Q_TH","number":3212,"title":"Sort files before loading","user":{"login":"lvwerra","id":8264887,"node_id":"MDQ6VXNlcjgyNjQ4ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8264887?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lvwerra","html_url":"https:\/\/github.com\/lvwerra","followers_url":"https:\/\/api.github.com\/users\/lvwerra\/followers","following_url":"https:\/\/api.github.com\/users\/lvwerra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lvwerra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lvwerra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lvwerra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lvwerra\/orgs","repos_url":"https:\/\/api.github.com\/users\/lvwerra\/repos","events_url":"https:\/\/api.github.com\/users\/lvwerra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lvwerra\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["This will be fixed by https:\/\/github.com\/huggingface\/datasets\/pull\/3221"],"created_at":1636024111000,"updated_at":1636134598000,"closed_at":1636134598000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"When loading a dataset that consists of several files (e.g. `my_data\/data_001.json`, `my_data\/data_002.json` etc.) 
they are not loaded in order when using `load_dataset(\"my_data\")`.\r\n\r\nThis could lead to counter-intuitive results if, for example, the data files are sorted by date or similar since they would appear in different order in the `Dataset`.\r\n\r\nThe straightforward solution is to sort the list of files alphabetically before loading them.\r\n\r\ncc @lhoestq \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3212\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3212\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
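Until the fix in #3221, the straightforward workaround the issue suggests can be applied at the call site; `my_data` is the placeholder directory from the issue body:

```python
import glob

from datasets import load_dataset

# Sorting guarantees data_001.json, data_002.json, ... enter the Dataset in order.
files = sorted(glob.glob("my_data/data_*.json"))
ds = load_dataset("json", data_files={"train": files})
```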
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3211","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3211\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3211\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3211\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3211","id":1044617913,"node_id":"PR_kwDODunzps4uFkBx","number":3211,"title":"Fix disable_nullable default value to False","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636023126000,"updated_at":1636024101000,"closed_at":1636024100000,"author_association":"MEMBER","active_lock_reason":null,"body":"Currently the `disable_nullable` parameter is not consistent across all dataset transforms. 
For example it is `False` in `map` but `True` in `flatten_indices`.\r\n\r\nThis creates unexpected behaviors like this\r\n```python\r\nfrom datasets import Dataset, concatenate_datasets\r\n\r\nd1 = Dataset.from_dict({\"a\": [0, 1, 2, 3]})\r\nd2 = d1.filter(lambda x: x[\"a\"] < 2).flatten_indices()\r\nd1.data.schema == d2.data.schema # False\r\n```\r\nThis can cause issues when concatenating datasets for example.\r\n\r\nFor consistency I set `disable_nullable` to `False` in `flatten_indices` and I fixed some docstrings\r\n\r\ncc @SBrandeis ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3211\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3211\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3211","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3211","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3211.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3211.patch","merged_at":1636024100000},"is_pull_request":true}
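With the default fixed to `False` across transforms, the snippet from the PR body above is expected to compare equal; a sketch of the post-fix behavior:

```python
from datasets import Dataset

d1 = Dataset.from_dict({"a": [0, 1, 2, 3]})
d2 = d1.filter(lambda x: x["a"] < 2).flatten_indices()
assert d1.data.schema == d2.data.schema  # consistent nullability on both sides
```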
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3210","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3210\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3210\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3210\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3210","id":1044611471,"node_id":"I_kwDODunzps4-Q4GP","number":3210,"title":"ConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.15.1\/datasets\/wmt16\/wmt16.py","user":{"login":"xiuzhilu","id":28184983,"node_id":"MDQ6VXNlcjI4MTg0OTgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28184983?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/xiuzhilu","html_url":"https:\/\/github.com\/xiuzhilu","followers_url":"https:\/\/api.github.com\/users\/xiuzhilu\/followers","following_url":"https:\/\/api.github.com\/users\/xiuzhilu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/xiuzhilu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/xiuzhilu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/xiuzhilu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/xiuzhilu\/orgs","repos_url":"https:\/\/api.github.com\/users\/xiuzhilu\/repos","events_url":"https:\/\/api.github.com\/users\/xiuzhilu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/xiuzhilu\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! 
Do you have some kind of proxy in your browser that gives you access to internet ?\r\n\r\nMaybe you're having this error because you don't have access to this URL from python ?"],"created_at":1636022846000,"updated_at":1642600726000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"when I use python examples\/pytorch\/translation\/run_translation.py --model_name_or_path examples\/pytorch\/translation\/opus-mt-en-ro --do_train --do_eval --source_lang en --target_lang ro --dataset_name wmt16 --dataset_config_name ro-en --output_dir \/tmp\/tst-translation --per_device_train_batch_size=4 --per_device_eval_batch_size=4 --overwrite_output_dir --predict_with_generate to finetune translation model on huggingface, I get the issue\"ConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.15.1\/datasets\/wmt16\/wmt16.py\".But I can open the https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.15.1\/datasets\/wmt16\/wmt16.py by using website. What should I do to solve the issue?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3210\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3210\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
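If the URL opens in a browser but not from Python, the proxy hypothesis in the comment above can be tested by passing the proxy explicitly. A sketch; the proxy address below is a placeholder, not a recommendation:

```python
from datasets import DownloadConfig, load_dataset

# Route dataset downloads through the same proxy the browser uses.
dl_config = DownloadConfig(proxies={"https": "http://127.0.0.1:3128"})  # placeholder proxy
ds = load_dataset("wmt16", "ro-en", download_config=dl_config)
```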
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3209","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3209\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3209\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3209\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3209","id":1044505771,"node_id":"I_kwDODunzps4-QeSr","number":3209,"title":"Unpin keras once TF fixes its release","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636017332000,"updated_at":1636109857000,"closed_at":1636109857000,"author_association":"MEMBER","active_lock_reason":null,"body":"Related to:\r\n- #3208","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3209\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3209\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3208","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3208\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3208\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3208\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3208","id":1044504093,"node_id":"PR_kwDODunzps4uFTIs","number":3208,"title":"Pin keras version until TF fixes its release","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636017212000,"updated_at":1636018255000,"closed_at":1636018254000,"author_association":"MEMBER","active_lock_reason":null,"body":"Fix #3207.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3208\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3208\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3208","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3208","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3208.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3208.patch","merged_at":1636018254000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3207","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3207\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3207\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3207\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3207","id":1044496389,"node_id":"I_kwDODunzps4-QcAF","number":3207,"title":"CI error: Another metric with the same name already exists in Keras 2.7.0","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1636016651000,"updated_at":1636018254000,"closed_at":1636018254000,"author_association":"MEMBER","active_lock_reason":null,"body":"## Describe the bug\r\nRelease of TensorFlow 2.7.0 contains an incompatibility with Keras. 
See:\r\n- keras-team\/keras#15579\r\n\r\nThis breaks our CI test suite: https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/8493\/workflows\/055c7ae2-43bc-49b4-9f11-8fc71f35a25c\/jobs\/52363\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3207\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3207\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3206","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3206\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3206\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3206\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3206","id":1044216270,"node_id":"PR_kwDODunzps4uEZJe","number":3206,"title":"[WIP] Allow user-defined hash functions via a registry","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi @BramVanroy, thanks for your PR.\r\n\r\nThere was a bug in TensorFlow\/Keras. We have made a temporary fix in master branch. Please, merge master into your PR branch, so that the CI tests pass.\r\n\r\n```\r\ngit checkout registry\r\ngit fetch upstream master\r\ngit merge upstream\/master\r\n```","@albertvillanova Done. Although new tests will need to be added. I am looking for some feedback on my initial proposal in this PR. Reviews and ideas welcome!","Hi ! Thanks for diving into this :)\r\n\r\nWith this approach you get the right hash when doing `Hasher.hash(nlp)` but if you try to hash an object that has `nlp` as one of its attributes for example you will get different hashes every time.\r\n\r\nThis is because `Hasher.hash` is not recursive itself. Indeed what happens when you try to hash an object is that:\r\n1. it is dumped with our custom `dill` pickler (which is recursive)\r\n2. 
the bytes of the dump are hashed\r\n\r\nTo fix this we must integrate the custom hashing as a custom pickler dumping instead.\r\n\r\nNote that we're only using the `pickler.dumps` method and not `pickler.loads` since we only use it to get hashes, so it doesn't matter if `loads` doesn't reconstruct the object exactly. What's important is only to capture all the necessary information that defines how the object transforms the data (here `nlp.to_bytes()` determines how the spacy pipeline transforms the text).\r\n\r\nOur pickler already has a registry and you can register new dump functions with:\r\n```python\r\nimport dill\r\nimport spacy\r\nfrom datasets.utils.py_utils import pklregister\r\n\r\n@pklregister(spacy.Language)\r\ndef _save_spacy_language(pickler, nlp):\r\n    pickler.save_reduce(...)  # I think we can use nlp.to_bytes() here\r\n    dill._dill.log.info(...)\r\n```\r\n\r\nYou can find some examples of custom dump functions in `py_utils.py`","Ah, darn it. Completely missed that register. Time wasted, unfortunately. \r\n\r\nTo better understand what you mean, I figured I'd try the basis of your snippet and I've noticed quite an annoying side-effect of how the pickle dispatch table seems to work. It explicitly uses an object's [`type()`](https:\/\/github.com\/python\/cpython\/blob\/87032cfa3dc975d7442fd57dea2c6a56d31c911a\/Lib\/pickle.py#L557-L558), which makes sense for pickling some (primitive) types but is not ideal for more complex ones, I think. `Hasher.hash` has the same issue as far as I can tell.\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/d21ce54f2c2782f854f975eb1dc2be6f923b4314\/src\/datasets\/fingerprint.py#L187-L191\r\n\r\nThis is very restrictive, and won't work for subclasses. In the case of spaCy, for instance, we register `Language`, but `nlp` is an instance of `English`, which is a _subclass_ of `Language`. These are different types, and so they will not match in the dispatch table. Maybe a more general approach could cover such cases? Something like this is a start, though too broad; ideally, a hierarchy of all the classes in the table is constructed and traversed, and the lowest class is selected to ensure that the most specific class function is dispatched.\r\n\r\n```python\r\ndef hash(cls, value: Any) -> str:\r\n    # Try to match the exact type\r\n    if type(value) in cls.dispatch:\r\n        return cls.dispatch[type(value)](cls, value)\r\n\r\n    # Try to match instance (superclass)\r\n    for type_cls, func in cls.dispatch.items():\r\n        if isinstance(value, type_cls):\r\n            return cls.dispatch[type_cls](cls, value)\r\n\r\n    return cls.hash_default(value)\r\n```\r\n\r\nThis does not solve the problem for pickling, though. That is quite unfortunate IMO because that implies that users always have to specify the most specific class, which is not always obvious. (For instance, `spacy.load`'s signature returns `Language`, but as said before a subclass might be returned.)\r\n\r\nSecond, I am trying to understand `save_reduce` but I can find very little documentation about it, only the source code which is quite cryptic. Can you explain it a bit? 
The required arguments are not very clear to me and there is no docstring.\r\n\r\n```python\r\ndef save_reduce(self, func, args, state=None, listitems=None, dictitems=None, obj=None):\r\n```","Here is an example illustrating the problem with sub-classes.\r\n\r\n```python\r\nimport spacy\r\n\r\nfrom spacy import Language\r\nfrom spacy.lang.en import English\r\n\r\nfrom datasets.utils.py_utils import Pickler, pklregister\r\n\r\n# Only useful in the registry (matching with `nlp`)\r\n# if you swap it out for very specific `English`\r\n@pklregister(English)\r\ndef hash_spacy_language(pickler, nlp):\r\n    pass\r\n\r\n\r\ndef main():\r\n    print(Pickler.dispatch)\r\n    nlp = spacy.load(\"en_core_web_sm\")\r\n    print(f\"NLP type {type(nlp)} in dispatch table? \", type(nlp) in Pickler.dispatch)\r\n\r\n\r\nif __name__ == '__main__':\r\n    main()\r\n```","Indeed that's not ideal.\r\nMaybe we could integrate all the subclasses directly in `datasets`. That's simple to do, but the catch is that if users have new subclasses of `Language` it won't work.\r\n\r\nOtherwise we can see how to make the API simpler for users by allowing subclasses:\r\n```python\r\n# if you swap it out for very specific `English`\r\n@pklregister(Language, allow_subclasses=True)\r\ndef hash_spacy_language(pickler, nlp):\r\n    pass\r\n```\r\n\r\nHere is an idea of how to make this work, let me know what you think:\r\n\r\nWhen `Pickler.dumps` is called, it uses `Pickler.save_global`, which is a method that is going to be called recursively on all the objects. We can customize this part, and make it work as we want when it encounters a subclass of `Language`.\r\n\r\nFor example, when it encounters a subclass of `Language`, we can dynamically register the hashing function for the subclass (`English` for example) in `Pickler.save_global`, right before calling the actual `dill.Pickler.save_global(self, obj, name=name)`:\r\n```python\r\npklregister(type(obj))(hash_function_registered_for_parent_class)\r\ndill.Pickler.save_global(self, obj, name=name)\r\n```\r\n\r\nIn practice that means we can have an additional dispatch dictionary (similar to `Pickler.dispatch`) to store the hashing functions when `allow_subclasses=True`, and use this dictionary in `Pickler.save_global` to check if we need to use a hashing function registered with `allow_subclasses=True` and get `hash_function_registered_for_parent_class`.","If I understood you correctly, I do not think that that is enough, because you are only doing this for a type and its direct parent class. You could do this for all superclasses (so traverse all ancestors and find the registered function for the first that is encountered). I can work on that, if you agree. The one thing that I am not sure about is how you want to create the secondary dispatch table. An empty dict as class variable in Pickler? 
(It doesn't have to be a true dispatcher, I think.)\r\n\r\nSure, let's try not to use too complicated stuff\r\n\r\n> I do not think that dynamic registration is the ideal situation (it feels a bit hacky). An alternative would be to subclass Pickle and Dill to make sure that, instead of just type() checking in the dispatch table, superclasses are also considered. But that is probably overkill.\r\n\r\nIndeed that would feel less hacky, but maybe it's too complex just for this. I feel like this part of the library is already hard to understand when you're not familiar with pickle. IMO having only a few changes that are simpler to understand is better than having a rewrite of `dill`'s core code.\r\n\r\nThanks a lot for your insights, it looks like we're going to have something that works well and that unlocks some nice flexibility for users :) Feel free to ping me anytime if I can help on this","Sure, thanks for brainstorming! I'll try to work on it this weekend. Will also revert the current changes in this PR and rename it. ","It seems like this is going in the right direction :). \r\n\r\n@BramVanroy Just one small suggestion for future contributions: instead of using `WIP` in the PR title, you can create a draft PR if you're still working on it.","Maybe I should just create a new (draft) PR then, seeing that I'll have to rename and revert the changes anyway? I'll link to this PR so that the discussion is at least referenced.","I can convert this PR to a draft PR. Let me know what you would prefer.","I think reverting my previous commits would make for a dirty (or confusing) commit history, so I'll just create a new one. Thanks."],"created_at":1635981942000,"updated_at":1636115891000,"closed_at":1636115884000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Inspired by the discussion on hashing in https:\/\/github.com\/huggingface\/datasets\/issues\/3178#issuecomment-959016329, @lhoestq suggested that it would be neat to allow users more control over the hashing process. Specifically, it would be great if users could specify specific hashing functions depending on the **class** of the object.\r\n\r\nAs an example, we found in the linked topic that loaded spaCy models (`Language` objects) have different hashes when `dump`'d, but their byte representation with `Language.to_bytes()` _is_ deterministic. It would therefore be great if we could specify that for `Language` objects, the hasher should hash the object's `to_bytes()` return value instead of the object itself.\r\n\r\nThis PR adds a new, but tiny, dependency to manage the registry, namely [`catalogue`](https:\/\/github.com\/explosion\/catalogue). \r\n\r\nTwo files have been changed (apart from the added dependency in `setup.py`) and one file has been added.\r\n\r\n**utils.registry** (added)\r\n\r\nThis file defines our custom Registry and builds a registry called \"hashers\". A Registry is basically a dictionary from names (str) to functions. A function can be added to the registry by a decorator, e.g. \r\n\r\n```python\r\n@hashers.register(spacy.Language)\r\ndef hash_spacy_language(nlp):\r\n    return Hasher.hash(nlp.to_bytes())\r\n```\r\n\r\nYou'll notice that `spacy.Language` is not a string, even though the registry holds a str->func mapping. To accomplish this with classes in a dynamic way, catalogue.Registry needed to be subclassed and modified as `DatasetsRegistry`. 
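(For illustration, a minimal sketch of the class-to-qualified-name conversion used for the registry keys; `get_cls_from_qualname` is mentioned in the to-dos below, but the exact shape of both helpers is assumed here:)\r\n\r\n```python\r\nimport importlib\r\n\r\ndef get_qualname(cls) -> str:\r\n    # e.g. spacy.language.Language -> \"spacy.language.Language\"\r\n    return f\"{cls.__module__}.{cls.__qualname__}\"\r\n\r\ndef get_cls_from_qualname(qualname: str):\r\n    # Inverse mapping; assumes a top-level class (nested classes would\r\n    # need the fail-safe discussed in the to-dos below)\r\n    module_name, _, cls_name = qualname.rpartition(\".\")\r\n    return getattr(importlib.import_module(module_name), cls_name)\r\n```\r\n\r\n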
All methods that use a name as an input are now modified so that classes are deterministically converted into strings in such a way that we can later retrieve the actual class from the string (below).\r\n\r\n**utils.py_utils** (modified)\r\n\r\nAdded two functions to deal with classes and their qualified names, that is, their full descriptive name including the module. On the one hand this allows us to retrieve a string from a given class, e.g. given the `Module` class, return the `torch.nn.Module` str. Conversely, a function is added to convert such a fully qualified name back into a class. For instance, given the string `torch.nn.Module`, return the `Module` class. These straightforward methods allow us to use classes and strings interchangeably without any user interaction needed - they can just register a class, and behind the scenes `DatasetsRegistry` converts these to deterministic strings.\r\n\r\n**fingerprint** (modified)\r\n\r\nUpdated `Hasher.hash` so that if the object to hash is an instance of a class in the registry, the registered function is used to hash the object instead of the default behavior. To do so we iterate over the registry `hashers` and convert its keys (strings) into classes, and then we can use `isinstance`.\r\n\r\n```python\r\n# Check if the current object is an instance that is\r\n# applicable to the user-defined hashers. If so, hash\r\n# with the user-defined function\r\nfor full_module_name, func in hashers.get_all().items():\r\n    registered_cls = get_cls_from_qualname(full_module_name)\r\n    if isinstance(value, registered_cls):\r\n        return func(value)\r\n```\r\n\r\n**Putting it all together**\r\n\r\nTo test this, you can try the following example with spaCy. First install spaCy from source and check out a specific commit.\r\n\r\n```shell\r\ngit clone https:\/\/github.com\/explosion\/spaCy.git\r\ncd spaCy\/\r\ngit checkout cab9209c3dfcd1b75dfe5657f10e52c4d847a3cf\r\ncd ..\r\n\r\ngit clone https:\/\/github.com\/BramVanroy\/datasets.git\r\ncd datasets\r\ngit checkout registry\r\npip install -e .\r\npip install ..\/spaCy\r\nspacy download en_core_web_sm\r\n```\r\n\r\nNow you can run the following script. By default it will use the custom hasher function for the Language object. You can enable the default behavior by commenting out `@hashers.register...`.\r\n\r\n```python\r\nimport spacy\r\n\r\nfrom datasets.fingerprint import Hasher\r\nfrom datasets.utils.registry import hashers\r\n\r\n# Register a function so that when the Hasher encounters a spacy.Language object\r\n# it uses this custom function to hash instead of the default\r\n@hashers.register(spacy.Language)\r\ndef hash_spacy_language(nlp):\r\n    return Hasher.hash(nlp.to_bytes())\r\n\r\n\r\ndef main():\r\n    print(hashers.get_all())\r\n    nlp = spacy.load(\"en_core_web_sm\")\r\n    dump1 = Hasher.hash(nlp)\r\n    nlp = spacy.load(\"en_core_web_sm\")\r\n    dump2 = Hasher.hash(nlp)\r\n    print(dump1)\r\n    # succeeds when using the registered custom function\r\n    # fails if using the default\r\n    assert dump1 == dump2\r\n\r\n\r\nif __name__ == '__main__':\r\n    main()\r\n```\r\n\r\nTo do\r\n====\r\n- The above is just a proof-of-concept. I am open to changes\/suggestions\r\n- Tests still need to be written\r\n- We should consider whether we can make `DatasetsRegistry` very restrictive and ONLY allow classes. 
That would make testing easier - otherwise we also need to test for other sorts of objects.\r\n- Maybe the `hashers` definition is better suited in `fingerprint`?\r\n- Documentation\/examples need to be updated\r\n- Not sure why the logger is not working in `hash()`\r\n- `get_cls_from_qualname` might need a fail-safe: is it possible for a full_qualname to not have a module, and if so how do we deal with that?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3206\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3206\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3206","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3206","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3206.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3206.patch","merged_at":null},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3205","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3205\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3205\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3205\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3205","id":1044099561,"node_id":"PR_kwDODunzps4uEAlw","number":3205,"title":"Add Multidoc2dial Dataset","user":{"login":"sivasankalpp","id":7344617,"node_id":"MDQ6VXNlcjczNDQ2MTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7344617?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sivasankalpp","html_url":"https:\/\/github.com\/sivasankalpp","followers_url":"https:\/\/api.github.com\/users\/sivasankalpp\/followers","following_url":"https:\/\/api.github.com\/users\/sivasankalpp\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sivasankalpp\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sivasankalpp\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sivasankalpp\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sivasankalpp\/orgs","repos_url":"https:\/\/api.github.com\/users\/sivasankalpp\/repos","events_url":"https:\/\/api.github.com\/users\/sivasankalpp\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sivasankalpp\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["@songfeng cc","Hi @sivasankalpp, thanks for your PR.\r\n\r\nThere was a bug in TensorFlow\/Keras. We have made a temporary fix in our master branch. Please, merge master into your PR branch, so that the CI tests pass.\r\n\r\n```\r\ngit checkout multidoc2dial\r\ngit fetch upstream master\r\ngit merge upstream\/master\r\n```","Hi @albertvillanova, I have merged master into my PR branch. All tests are passing. \r\nPlease take a look when you get a chance, thanks! \r\n","Thanks for your feedback @lhoestq. We addressed your comments in the latest commit. 
Let us know if everything looks okay :) "],"created_at":1635972511000,"updated_at":1637775169000,"closed_at":1637772908000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"This PR adds the MultiDoc2Dial dataset introduced in this [paper](https:\/\/arxiv.org\/pdf\/2109.12595v1.pdf )","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3205\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3205\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3205","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3205","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3205.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3205.patch","merged_at":1637772908000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3204","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3204\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3204\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3204\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3204","id":1043707307,"node_id":"I_kwDODunzps4-NbWr","number":3204,"title":"FileNotFoundError for TupleIE dataste","user":{"login":"arda-vianai","id":75334917,"node_id":"MDQ6VXNlcjc1MzM0OTE3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75334917?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arda-vianai","html_url":"https:\/\/github.com\/arda-vianai","followers_url":"https:\/\/api.github.com\/users\/arda-vianai\/followers","following_url":"https:\/\/api.github.com\/users\/arda-vianai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arda-vianai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arda-vianai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arda-vianai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arda-vianai\/orgs","repos_url":"https:\/\/api.github.com\/users\/arda-vianai\/repos","events_url":"https:\/\/api.github.com\/users\/arda-vianai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arda-vianai\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["@mariosasko @lhoestq Could you give me an update on how to load the dataset after the fix?\r\nThanks.","Hi @arda-vianai,\r\n\r\nfirst, you can try:\r\n```python\r\nimport datasets\r\ndataset = datasets.load_dataset('tuple_ie', 'all', revision=\"master\")\r\n```\r\nIf this doesn't work, your version of `datasets` is missing some features that are required to run the dataset script, so install the master version with the following command:\r\n```\r\npip install git+https:\/\/github.com\/huggingface\/datasets.git\r\n```\r\nand then:\r\n```python\r\nimport datasets\r\ndataset = datasets.load_dataset('tuple_ie', 'all')\r\n```\r\nshould work (even without `revision`).","@mariosasko \r\nThanks, it is working now. I actually did that before but I didn't restart the kernel. I restarted it and it works now. 
My bad!!!\r\nMany thanks and great job!\r\n-arda"],"created_at":1635951415000,"updated_at":1636127475000,"closed_at":1636121765000,"author_association":"NONE","active_lock_reason":null,"body":"Hi,\r\n`dataset = datasets.load_dataset('tuple_ie', 'all')`\r\n\r\nreturns a FileNotFound error. Is the data not available? \r\n\r\nMany thanks.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3204\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3204\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3203","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3203\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3203\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3203\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3203","id":1043552766,"node_id":"PR_kwDODunzps4uCNoT","number":3203,"title":"Updated: DaNE - updated URL for download","user":{"login":"MalteHB","id":47593213,"node_id":"MDQ6VXNlcjQ3NTkzMjEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47593213?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MalteHB","html_url":"https:\/\/github.com\/MalteHB","followers_url":"https:\/\/api.github.com\/users\/MalteHB\/followers","following_url":"https:\/\/api.github.com\/users\/MalteHB\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MalteHB\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MalteHB\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MalteHB\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MalteHB\/orgs","repos_url":"https:\/\/api.github.com\/users\/MalteHB\/repos","events_url":"https:\/\/api.github.com\/users\/MalteHB\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MalteHB\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Actually it looks like the old URL is still working, and it's also the one that is mentioned in https:\/\/github.com\/alexandrainst\/danlp\/blob\/master\/docs\/docs\/datasets.md\r\n\r\nWhat makes you think we should use the new URL ?","@lhoestq Sorry! I might have jumped to conclusions a bit too fast here... \r\n\r\nI was working in Google Colab and got an error that it was unable to use the URL. I then forked the project, updated the URL, ran it locally and it worked. I therefore assumed that my URL update fixed the issue, however, I see now that it might rather be a Google Colab issue... \r\n\r\nStill - this seems to be the official URL for downloading the dataset, and I think that it will be most beneficial to use. :-) ","It looks like they're using these new urls for their new datasets. Maybe let's change to the new URL in case the old one stops working at one point. 
Thanks"],"created_at":1635944113000,"updated_at":1636031676000,"closed_at":1636026403000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"It seems that DaNLP has updated their download URLs and it therefore also needs to be updated in here...","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3203\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3203\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3203","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3203","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3203.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3203.patch","merged_at":1636026403000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3202","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3202\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3202\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3202\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3202","id":1043213660,"node_id":"I_kwDODunzps4-Li1c","number":3202,"title":"Add mIoU metric","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635928952000,"updated_at":1635929684000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nRecently, some semantic segmentation models were added to HuggingFace Transformers, including [SegFormer](https:\/\/huggingface.co\/transformers\/model_doc\/segformer.html) and [BEiT](https:\/\/huggingface.co\/transformers\/model_doc\/beit.html).\r\n\r\nSemantic segmentation (which is the task of labeling every pixel of an image with a corresponding class) is typically evaluated using the Mean Intersection and Union (mIoU). 
Together with the upcoming Image Feature, adding this metric could be very handy when creating example scripts to fine-tune any Transformer-based model on a semantic segmentation dataset.\r\n\r\nAn implementation can be found [here](https:\/\/github.com\/open-mmlab\/mmsegmentation\/blob\/504965184c3e6bc9ec43af54237129ef21981a5f\/mmseg\/core\/evaluation\/metrics.py#L132) for instance.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3202\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3202\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3201","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3201\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3201\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3201\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3201","id":1043209142,"node_id":"I_kwDODunzps4-Lhu2","number":3201,"title":"Add GSM8K dataset","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635928604000,"updated_at":1635928604000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Adding a Dataset\r\n- **Name:** GSM8K (short for Grade School Math 8k)\r\n- **Description:** GSM8K is a dataset of 8.5K high quality linguistically diverse grade school math word problems created by human problem writers.\r\n- **Paper:** https:\/\/openai.com\/blog\/grade-school-math\/\r\n- **Data:** https:\/\/github.com\/openai\/grade-school-math\r\n- **Motivation:** The dataset is useful to investigate the reasoning abilities of large Transformer models, such as GPT-3.\r\n\r\nInstructions to add a new dataset can be found 
[here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3201\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3201\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3200","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3200\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3200\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3200\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3200","id":1042887291,"node_id":"PR_kwDODunzps4uAZLu","number":3200,"title":"Catch token invalid error in CI","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635890186000,"updated_at":1635932468000,"closed_at":1635932468000,"author_association":"MEMBER","active_lock_reason":null,"body":"The staging back end sometimes returns invalid token errors when trying to delete a repo.\r\nI modified the fixture in the test that uses staging to ignore this error","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3200\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3200\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3200","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3200","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3200.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3200.patch","merged_at":1635932468000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3199","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3199\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3199\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3199\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3199","id":1042860935,"node_id":"PR_kwDODunzps4uAVzQ","number":3199,"title":"Bump huggingface_hub","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635888550000,"updated_at":1636854491000,"closed_at":1635889300000,"author_association":"MEMBER","active_lock_reason":null,"body":"huggingface_hub just released its first minor version, so we need to update the dependency\r\n\r\nIt was supposed to be part of 1.15.0 but I'm adding it for 1.15.1","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3199\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3199\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3199","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3199","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3199.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3199.patch","merged_at":1635889300000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3198","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3198\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3198\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3198\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3198","id":1042679548,"node_id":"PR_kwDODunzps4t_5G8","number":3198,"title":"Add Multi-Lingual LibriSpeech","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635877439000,"updated_at":1636045762000,"closed_at":1636045762000,"author_association":"MEMBER","active_lock_reason":null,"body":"Add https:\/\/www.openslr.org\/94\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3198\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3198\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3198","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3198","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3198.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3198.patch","merged_at":1636045762000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3197","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3197\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3197\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3197\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3197","id":1042541127,"node_id":"PR_kwDODunzps4t_cry","number":3197,"title":"Fix optimized encoding for arrays","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635868553000,"updated_at":1635880344000,"closed_at":1635880343000,"author_association":"MEMBER","active_lock_reason":null,"body":"Hi !\r\n\r\n#3124 introduced a regression that made the benchmarks CI fail because of a bad array comparison when checking the first encoded element. 
This PR fixes this by making sure that encoding is applied on all sequence types except lists.\r\n\r\ncc @eladsegal fyi (no big deal)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3197\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3197\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3197","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3197","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3197.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3197.patch","merged_at":1635880343000},"is_pull_request":true}
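The fix in #3197 hinges on distinguishing plain Python lists from other sequence types when encoding the first element of a column. A minimal sketch of that idea, assuming a hypothetical helper name (`_maybe_encode` is not the actual function in `datasets`):

```python
import numpy as np

def _maybe_encode(value):
    # Plain Python lists take the generic encoding path untouched; other
    # sequence types (NumPy arrays, tuples, ...) are encoded up front, so
    # later equality checks compare lists rather than NumPy arrays (where
    # `a == b` returns an element-wise boolean array, not a bool).
    if isinstance(value, list):
        return value
    if isinstance(value, (np.ndarray, tuple)):
        return np.asarray(value).tolist()
    return value

assert _maybe_encode([1, 2]) == [1, 2]           # list: left as-is
assert _maybe_encode(np.array([1, 2])) == [1, 2]  # array: encoded to list
assert _maybe_encode((1, 2)) == [1, 2]            # tuple: encoded to list
```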
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3196","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3196\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3196\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3196\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3196","id":1042223913,"node_id":"PR_kwDODunzps4t-bxy","number":3196,"title":"QOL improvements: auto-flatten_indices and desc in map calls","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635852530000,"updated_at":1635867669000,"closed_at":1635867668000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"This PR:\r\n* automatically calls `flatten_indices` where needed: in `unique` and `save_to_disk` to avoid saving the indices file\r\n* adds descriptions to the map calls\r\n\r\nFix #3040 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3196\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3196\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3196","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3196","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3196.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3196.patch","merged_at":1635867668000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3195","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3195\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3195\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3195\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3195","id":1042204044,"node_id":"PR_kwDODunzps4t-ZR0","number":3195,"title":"More robust `None` handling","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["I also created a PR regarding `disable_nullable` that must be always `False` by default, in order to always allow None values\r\nhttps:\/\/github.com\/huggingface\/datasets\/pull\/3211","@lhoestq I addressed your comments, added tests, did some refactoring to make the implementation cleaner and added support for `None` values in `map` transforms when the feature type is `ArrayXD` (previously, I only implemented `None` decoding).\r\n\r\nMy only concern is that during decoding `ArrayXD` arrays with `None` values will be auto-casted to `float64` to allow `np.nan` insertion and this might be unexpected if `dtype` is not `float`, so one option would be to allow `None` values only if the storage type is `float32` or `float64`. Let me know WDYT would be the most consistent behavior here.","Cool ! 
:D\r\n> My only concern is that during decoding ArrayXD arrays with None values will be auto-casted to float64 to allow np.nan insertion and this might be unexpected if dtype is not float, so one option would be to allow None values only if the storage type is float32 or float64. Let me know WDYT would be the most consistent behavior here.\r\n\r\nYes that makes sense to only fill with nan if the type is compatible","After some more experimenting, I think we can keep auto-cast to float because PyArrow also does it:\r\n```python\r\nimport numpy as np\r\nimport pyarrow as pa\r\narr = pa.array([1, 2, 3, 4, None], type=pa.int32()).to_numpy(zero_copy_only=False) # None present - int32 -> float64\r\nassert arr.dtype == np.float64\r\n```\r\nAdditional changes:\r\n* fixes a bug in the `_is_zero_copy_only` implementation for the ArrayXD types. Previously, `_is_zero_copy_only` would always return False for these types. Still have to see if it's possible to optimize copying of the non-extension types (`Sequence`, ...), but I plan to work on that in a separate PR.\r\n* https:\/\/github.com\/huggingface\/datasets\/pull\/2891 introduced a bug where the dtype of `ArrayXD` wouldn't be preserved due to `to_pylist` call in NumPy Formatter (`np.array(np.array(..).tolist())` doesn't necessarily preserve dtype of the initial array), so I'm also fixing that. ","The CI fail for windows is unrelated to this PR, merging"],"created_at":1635851710000,"updated_at":1639060020000,"closed_at":1639060018000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"PyArrow has explicit support for `null` values, so it makes sense to support Nones on our side as well.\r\n\r\n[Colab Notebook with examples](https:\/\/colab.research.google.com\/drive\/1zcK8BnZYnRe3Ao2271u1T19ag9zLEiy3?usp=sharing)\r\n\r\nChanges:\r\n* allow None for the feature types with special encoding (`ClassLabel, TranslationVariableLanguages, Value, _ArrayXD`)\r\n* handle None in `class_encode_column` (also there is an option to stringify Nones and treat them as a class)\r\n* support None sorting in `sort` (use pandas for that) \r\n* handle None in align_labels_with_mapping\r\n* support for None in ArrayXD (converts `None` to `np.nan` to align the behavior with PyArrow)\r\n* support for None in the Audio\/Image feature\r\n* allow promotion when concatenating tables (`pa.concat_tables(table_list, promote=True)`) and `null` row\/~~column~~ broadcasting similar to pandas \r\n\r\nAdditional notes:\r\n* use `null` instead of `none` for function arguments for consistency with existing `disable_nullable` \r\n* fixes a bug with the `update_metadata_with_features` call in `Dataset.rename_columns`\r\n* had to update some tests, let me know if that's ok\r\n\r\nTODO:\r\n- [x] check how the Audio feature behaves with Nones\r\n- [x] Better None handling in `concatenate_datasets`\/`add_item`\r\n- [x] Fix formatting with Nones\r\n- [x] Add Colab with examples\r\n- [x] Tests\r\n\r\nTODOs for subsequent PRs:\r\n- Mention None handling in the docs\r\n- Add `drop_null`\/`fill_null` to `Dataset`\/`DatasetDict`\r\n\r\nFix #3181 
#3253","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3195\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3195\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3195","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3195","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3195.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3195.patch","merged_at":1639060017000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3194","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3194\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3194\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3194\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3194","id":1041999535,"node_id":"PR_kwDODunzps4t91Eg","number":3194,"title":"Update link to Datasets Tagging app in Spaces","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635840830000,"updated_at":1636367783000,"closed_at":1636367782000,"author_association":"MEMBER","active_lock_reason":null,"body":"Fix #3193.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3194\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3194\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3194","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3194","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3194.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3194.patch","merged_at":1636367782000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3193","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3193\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3193\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3193\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3193","id":1041971117,"node_id":"I_kwDODunzps4-Gzet","number":3193,"title":"Update link to datasets-tagging app","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvil
lanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635838799000,"updated_at":1636367782000,"closed_at":1636367782000,"author_association":"MEMBER","active_lock_reason":null,"body":"Once datasets-tagging has been transferred to Spaces:\r\n- huggingface\/datasets-tagging#22\r\n\r\nWe should update the link in Datasets.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3193\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3193\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3192","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3192\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3192\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3192\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3192","id":1041308086,"node_id":"I_kwDODunzps4-ERm2","number":3192,"title":"Multiprocessing filter\/map (tests) not working on Windows","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635780968000,"updated_at":1635782223000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"While running the tests, I found that the multiprocessing examples fail on Windows, or rather they do not complete: they cause a deadlock. I haven't dug deep into it, but they do not seem to work as-is. 
I currently have no time to test this in detail, but at least the tests seem not to run correctly (deadlocking).\r\n\r\n## Steps to reproduce the bug\r\n```shell\r\npytest tests\/test_arrow_dataset.py -k \"test_filter_multiprocessing\"\r\npytest tests\/test_arrow_dataset.py -k \"test_map_multiprocessing\"\r\n```\r\n\r\n## Expected results\r\nThe functionality to work on all platforms.\r\n\r\n## Actual results\r\nDeadlock.\r\n\r\n## Environment info\r\n- `datasets` version: 1.14.1.dev0\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: 3.9.2, also tested with 3.7.9\r\n- PyArrow version: 4.0.1\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3192\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3192\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
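One common source of Windows-only hangs in multiprocessing code, offered here as a hypothesis rather than a confirmed diagnosis of #3192: Windows uses the "spawn" start method, so code that launches workers (e.g. `num_proc > 1`) must run under a `__main__` guard, or each child re-imports the module and re-executes the spawning code. A minimal sketch:

```python
from datasets import load_dataset

def keep_short(example):
    # Top-level function so it can be pickled by spawned worker processes.
    return len(example["text"]) < 1000

if __name__ == "__main__":
    # Without this guard, spawned children on Windows re-run the module's
    # top-level code, which can recurse into process creation and deadlock.
    ds = load_dataset("imdb", split="train")
    ds = ds.filter(keep_short, num_proc=2)
    print(len(ds))
```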
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3191","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3191\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3191\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3191\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3191","id":1041225111,"node_id":"I_kwDODunzps4-D9WX","number":3191,"title":"Dataset viewer issue for '*compguesswhat*'","user":{"login":"benotti","id":2545336,"node_id":"MDQ6VXNlcjI1NDUzMzY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2545336?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/benotti","html_url":"https:\/\/github.com\/benotti","followers_url":"https:\/\/api.github.com\/users\/benotti\/followers","following_url":"https:\/\/api.github.com\/users\/benotti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/benotti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/benotti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/benotti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/benotti\/orgs","repos_url":"https:\/\/api.github.com\/users\/benotti\/repos","events_url":"https:\/\/api.github.com\/users\/benotti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/benotti\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635776209000,"updated_at":1635776209000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Dataset viewer issue for '*compguesswhat*'\r\n\r\n**Link:** https:\/\/huggingface.co\/datasets\/compguesswhat\r\n\r\nFile not found\r\n\r\nAm I the one who added this dataset ? No\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3191\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3191\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3190","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3190\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3190\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3190\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3190","id":1041153631,"node_id":"I_kwDODunzps4-Dr5f","number":3190,"title":"combination of shuffle and filter results in a bug","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["I cannot reproduce this on master and pyarrow==4.0.1.\r\n","Hi ! There was a regression in `datasets` 1.12 that introduced this bug. It has been fixed in #3019 in 1.13\r\n\r\nCan you try to update `datasets` and try again ?","Thanks a lot, fixes with 1.13"],"created_at":1635772049000,"updated_at":1635850249000,"closed_at":1635850249000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\nHi,\r\nI would like to shuffle a dataset, then filter it based on each existing label. however, the combination of `filter`, `shuffle` seems to results in a bug. In the minimal example below, as you see in the filtered results, the filtered labels are not unique, meaning filter has not worked. 
Any suggestions for a temporary fix are appreciated @lhoestq.\r\n\r\nThanks.\r\nBest regards\r\nRabeeh\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport numpy as np\r\nimport datasets \r\n\r\ndatasets = datasets.load_dataset('super_glue', 'rte', script_version=\"master\")\r\nshuffled_data = datasets[\"train\"].shuffle(seed=42)\r\nfor label in range(2):\r\n print(\"label \", label)\r\n data = shuffled_data.filter(lambda example: int(example['label']) == label)\r\n print(\"length \", len(data), np.unique(data['label']))\r\n```\r\n\r\n## Expected results\r\nFiltering per label should only return the data with that specific label.\r\n\r\n## Actual results\r\nAs you can see, the filtered data for each label still contains both labels [0, 1]\r\n```\r\nlabel 0\r\nlength 1249 [0 1]\r\nlabel 1\r\nlength 1241 [0 1]\r\n```\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1 \r\n- Platform: linux \r\n- Python version: 3.7.11 \r\n- PyArrow version: 5.0.0 \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3190\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3190\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
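The comments above note the regression was fixed in `datasets` 1.13 (#3019), so upgrading is the real fix. For anyone pinned to 1.12, one plausible workaround (not verified against that exact version) is to materialize the shuffled order with `flatten_indices()` before filtering, since the bug involved `filter` ignoring the shuffle's indices mapping:

```python
import numpy as np
import datasets

# `dsets` instead of `datasets` to avoid shadowing the module.
dsets = datasets.load_dataset("super_glue", "rte", script_version="master")

# flatten_indices() bakes the shuffled order into the Arrow table,
# so filter no longer depends on the indices-mapping code path.
shuffled_data = dsets["train"].shuffle(seed=42).flatten_indices()
for label in range(2):
    data = shuffled_data.filter(lambda ex: int(ex["label"]) == label)
    print("label", label, "length", len(data), np.unique(data["label"]))
```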
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3189","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3189\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3189\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3189\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3189","id":1041044986,"node_id":"I_kwDODunzps4-DRX6","number":3189,"title":"conll2003 incorrect label explanation","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi @BramVanroy,\r\n\r\nsince these fields are of type `ClassLabel` (you can check this with `dset.features`), you can inspect the possible values with:\r\n```python\r\ndset.features[field_name].feature.names # .feature because it's a sequence of labels\r\n```\r\n\r\nand to find the mapping between names and integers, use: \r\n```python\r\ndset.features[field_name].feature.int2str(value_or_values_list) # map integer value to string value\r\n# or\r\ndset.features[field_name].feature.str2int(value_or_values_list) # map string value to integer value\r\n```\r\n\r\n"],"created_at":1635764610000,"updated_at":1636454458000,"closed_at":1636454458000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"In the [conll2003](https:\/\/huggingface.co\/datasets\/conll2003#data-fields) README, the labels are described as 
follows\r\n\r\n> - `id`: a `string` feature.\r\n> - `tokens`: a `list` of `string` features.\r\n> - `pos_tags`: a `list` of classification labels, with possible values including `\"` (0), `''` (1), `#` (2), `$` (3), `(` (4).\r\n> - `chunk_tags`: a `list` of classification labels, with possible values including `O` (0), `B-ADJP` (1), `I-ADJP` (2), `B-ADVP` (3), `I-ADVP` (4).\r\n> - `ner_tags`: a `list` of classification labels, with possible values including `O` (0), `B-PER` (1), `I-PER` (2), `B-ORG` (3), `I-ORG` (4) `B-LOC` (5), `I-LOC` (6) `B-MISC` (7), `I-MISC` (8).\r\n\r\nFirst of all, it would be great if we could get a list of ALL possible pos_tags.\r\n\r\nSecond, the chunk tag labels cannot be correct. The description says the values go from 0 to 4 whereas the data shows values from at least 11 to 21 and 0.\r\n\r\nEDIT: not really a bug, sorry for mistagging.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3189\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3189\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
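The snippets from the comment above can be combined into a complete listing of each tag inventory. A sketch assuming the standard `conll2003` loading script, where each tag column is a `Sequence` of `ClassLabel`:

```python
from datasets import load_dataset

dset = load_dataset("conll2003", split="train")

# Print the full label list for each tag column. The inventories are much
# larger than the README's examples suggest (ner_tags has 9 classes, and
# pos_tags/chunk_tags have dozens -- hence data values well above 4).
for field_name in ("pos_tags", "chunk_tags", "ner_tags"):
    names = dset.features[field_name].feature.names
    print(field_name, len(names), names)

# Convert between integer ids and string labels:
print(dset.features["ner_tags"].feature.int2str(1))        # 'B-PER'
print(dset.features["ner_tags"].feature.str2int("B-PER"))  # 1
```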
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3188","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3188\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3188\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3188\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3188","id":1040980712,"node_id":"I_kwDODunzps4-DBro","number":3188,"title":"conll2002 issues","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! Thanks for reporting :)\r\n\r\nThis is related to https:\/\/github.com\/huggingface\/datasets\/issues\/2742, I'm working on it. It should fix the viewer for around 80 datasets.\r\n","Ah, hadn't seen that sorry.\r\n\r\nThe scrambled \"point of contact\" is a separate issue though, I think.","@lhoestq The \"point of contact\" is still an issue.","It will be fixed in https:\/\/github.com\/huggingface\/datasets\/pull\/3274, thanks"],"created_at":1635760164000,"updated_at":1636984259000,"closed_at":1636737491000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"**Link:** https:\/\/huggingface.co\/datasets\/conll2002\r\n\r\nThe dataset viewer throws a server error when trying to preview the dataset. 
\r\n\r\n```\r\nMessage: Extraction protocol 'train' for file at 'https:\/\/raw.githubusercontent.com\/teropa\/nlp\/master\/resources\/corpora\/conll2002\/esp.train' is not implemented yet\r\n```\r\n\r\nIn addition, the \"point of contact\" has encoding issues and does not work when clicked.\r\n\r\nAm I the one who added this dataset ? No, @lhoestq did","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3188\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3188\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3187","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3187\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3187\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3187\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3187","id":1040412869,"node_id":"PR_kwDODunzps4t44Ab","number":3187,"title":"Add ChrF(++) (as implemented in sacrebleu)","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635670438000,"updated_at":1635864650000,"closed_at":1635863486000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Similar to my [PR for TER](https:\/\/github.com\/huggingface\/datasets\/pull\/3153), it feels only right to also include ChrF and friends. These are present in Sacrebleu and are therefore very similar to implement as TER and sacrebleu. I tested the implementation with sacrebleu's tests to verify. 
You can try this below for yourself\r\n\r\n```python\r\nimport datasets\r\n\r\n\r\nEPSILON = 1e-4\r\nchrf = datasets.load_metric(r\"path\\to\\datasets\\metrics\\chrf\")\r\n\r\ntest_cases = [\r\n ([\"abcdefg\"], [\"hijklmnop\"], 0.0),\r\n ([\"a\"], [\"b\"], 0.0),\r\n ([\"\"], [\"b\"], 0.0),\r\n ([\"\"], [\"ref\"], 0.0),\r\n ([\"\"], [\"reference\"], 0.0),\r\n ([\"aa\"], [\"ab\"], 8.3333),\r\n ([\"a\", \"b\"], [\"a\", \"c\"], 8.3333),\r\n ([\"a\"], [\"a\"], 16.6667),\r\n ([\"a b c\"], [\"a b c\"], 50.0),\r\n ([\"a b c\"], [\"abc\"], 50.0),\r\n ([\" risk assessment must be made of those who are qualified and expertise in the sector - these are the scientists .\"],\r\n [\"risk assessment has to be undertaken by those who are qualified and expert in that area - that is the scientists .\"], 63.361730),\r\n ([\" Die Beziehung zwischen Obama und Netanjahu ist nicht gerade freundlich. \"],\r\n [\"Das Verh\u00e4ltnis zwischen Obama und Netanyahu ist nicht gerade freundschaftlich.\"], 64.1302698),\r\n ([\"Niemand hat die Absicht, eine Mauer zu errichten\"], [\"Niemand hat die Absicht, eine Mauer zu errichten\"], 100.0),\r\n]\r\n\r\nfor hyp, ref, score in test_cases:\r\n # Note the reference transformation which is different from sacrebleu's input format\r\n results = chrf.compute(predictions=hyp, references=[[r] for r in ref],\r\n char_order=6, word_order=0, beta=3, eps_smoothing=True)\r\n if abs(score - results[\"score\"]) > EPSILON:\r\n print(f\"expected {score}, got {results['score']} for {hyp} - {ref}\")\r\n\r\ntest_cases_effective_order = [\r\n ([\"a\"], [\"a\"], 100.0),\r\n ([\"\"], [\"reference\"], 0.0),\r\n ([\"a b c\"], [\"a b c\"], 100.0),\r\n ([\"a b c\"], [\"abc\"], 100.0),\r\n ([\"\"], [\"c\"], 0.0),\r\n ([\"a\", \"b\"], [\"a\", \"c\"], 50.0),\r\n ([\"aa\"], [\"ab\"], 25.0),\r\n]\r\n\r\nfor hyp, ref, score in test_cases_effective_order:\r\n # Note the reference transformation which is different from sacrebleu's input format\r\n results = chrf.compute(predictions=hyp, references=[[r] for r in ref],\r\n char_order=6, word_order=0, beta=3, eps_smoothing=False)\r\n if abs(score - results[\"score\"]) > EPSILON:\r\n print(f\"expected {score}, got {results['score']} for {hyp} - {ref}\")\r\n\r\ntest_cases_keep_whitespace = [\r\n (\r\n [\"Die Beziehung zwischen Obama und Netanjahu ist nicht gerade freundlich.\"],\r\n [\"Das Verh\u00e4ltnis zwischen Obama und Netanyahu ist nicht gerade freundschaftlich.\"],\r\n 67.3481606,\r\n ),\r\n (\r\n [\"risk assessment must be made of those who are qualified and expertise in the sector - these are the scientists .\"],\r\n [\"risk assessment has to be undertaken by those who are qualified and expert in that area - that is the scientists .\"],\r\n 65.2414427,\r\n ),\r\n]\r\n\r\nfor hyp, ref, score in test_cases_keep_whitespace:\r\n # Note the reference transformation which is different from sacrebleu's input format\r\n results = chrf.compute(predictions=hyp, references=[[r] for r in ref],\r\n char_order=6, word_order=0, beta=3,\r\n whitespace=True)\r\n if abs(score - results[\"score\"]) > EPSILON:\r\n print(f\"expected {score}, got {results['score']} for {hyp} - {ref}\")\r\n\r\n\r\npredictions = [\"The relationship between Obama and Netanyahu is not exactly friendly.\"]\r\nreferences = [[\"The ties between Obama and Netanyahu are not particularly friendly.\"]]\r\nprint(chrf.compute(predictions=predictions, 
references=references))\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3187\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3187\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3187","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3187","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3187.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3187.patch","merged_at":1635863486000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3186","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3186\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3186\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3186\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3186","id":1040369397,"node_id":"I_kwDODunzps4-Asb1","number":3186,"title":"Dataset viewer for nli_tr","user":{"login":"e-budur","id":2246791,"node_id":"MDQ6VXNlcjIyNDY3OTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2246791?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/e-budur","html_url":"https:\/\/github.com\/e-budur","followers_url":"https:\/\/api.github.com\/users\/e-budur\/followers","following_url":"https:\/\/api.github.com\/users\/e-budur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/e-budur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/e-budur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/e-budur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/e-budur\/orgs","repos_url":"https:\/\/api.github.com\/users\/e-budur\/repos","events_url":"https:\/\/api.github.com\/users\/e-budur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/e-budur\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635652593000,"updated_at":1635652593000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Dataset viewer issue for '*nli_tr*'\r\n\r\n**Link:** https:\/\/huggingface.co\/datasets\/nli_tr\r\n\r\nHello,\r\n\r\nThank you for the new dataset preview feature that will help the users to view the datasets online.\r\n\r\nWe just noticed that the dataset viewer widget in the `nli_tr` dataset shows the error below. The error must be due to a temporary problem that may have blocked access to the dataset through the dataset viewer. But the dataset is currently accessible through the link in the error message. 
May we kindly ask if it would be possible to rerun the job so that it can access the dataset for the dataset viewer function?\r\n\r\nThank you.\r\nEmrah\r\n\r\n\r\n------------------------------------------\r\nServer Error\r\nStatus code: 404\r\nException: FileNotFoundError\r\nMessage: [Errno 2] No such file or directory: 'zip:\/\/snli_tr_1.0_train.jsonl::https:\/\/tabilab.cmpe.boun.edu.tr\/datasets\/nli_datasets\/snli_tr_1.0.zip\r\n------------------------------------------\r\n\r\nAm I the one who added this dataset ? Yes\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3186\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3186\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3185","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3185\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3185\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3185\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3185","id":1040291961,"node_id":"I_kwDODunzps4-AZh5","number":3185,"title":"7z dataset preview not implemented?","user":{"login":"Kirili4ik","id":30757466,"node_id":"MDQ6VXNlcjMwNzU3NDY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30757466?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Kirili4ik","html_url":"https:\/\/github.com\/Kirili4ik","followers_url":"https:\/\/api.github.com\/users\/Kirili4ik\/followers","following_url":"https:\/\/api.github.com\/users\/Kirili4ik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Kirili4ik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Kirili4ik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Kirili4ik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Kirili4ik\/orgs","repos_url":"https:\/\/api.github.com\/users\/Kirili4ik\/repos","events_url":"https:\/\/api.github.com\/users\/Kirili4ik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Kirili4ik\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635625107000,"updated_at":1635625107000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Dataset viewer issue for dataset 'samsum'\r\n\r\n**Link:** https:\/\/huggingface.co\/datasets\/samsum\r\n\r\nServer Error\r\nStatus code: 400\r\nException: NotImplementedError\r\nMessage: Extraction protocol '7z' for file at 'https:\/\/arxiv.org\/src\/1911.12237v2\/anc\/corpus.7z' is not implemented 
yet\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3185\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3185\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3184","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3184\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3184\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3184\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3184","id":1040114102,"node_id":"PR_kwDODunzps4t4J61","number":3184,"title":"RONEC v2","user":{"login":"dumitrescustefan","id":22746816,"node_id":"MDQ6VXNlcjIyNzQ2ODE2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22746816?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dumitrescustefan","html_url":"https:\/\/github.com\/dumitrescustefan","followers_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/followers","following_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/orgs","repos_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/repos","events_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["@lhoestq Thanks for the review. I totally understand what you are saying. Normally, I would definitely agree with you, but in this particular case, the quality of v1 is poor, and the dataset itself is small (at the time we created v1 it was the only RO NER dataset, and its size was limited by the available resources). \r\n\r\nThis is why we worked to build a larger one, with much better inter-annotator agreement. Fact is, models trained on v1 will be of very low quality and I would not recommend to anybody to use\/do that. That's why I'd strongly suggest we replace v1 with v2, and kindof make v1 vanish :) \r\n\r\nWhat do you think? If you insist on having v1 accessible, I'll add the required code. Thanks!\r\n\r\n","Ok I see ! 
I think it's fine then, no need to re-add V1"],"created_at":1635591003000,"updated_at":1635868943000,"closed_at":1635868942000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Hi, as we've recently finished the new RONEC (Romanian Named Entity Corpus), we'd like to update the dataset here as well. It's actually essential, as the links to V1 are no longer valid. \r\n\r\nIn reality we'd like to completely replace v1, as v2 is a full re-annotation of v1 with additional data (up to 2x the size of v1).\r\n\r\nI've run `make style` and all the dummy and real data tests, and they passed.\r\n\r\nI hope it's okay to merge the new RONEC v2 into `datasets`. \r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3184\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3184\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3184","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3184","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3184.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3184.patch","merged_at":1635868942000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3183","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3183\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3183\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3183\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3183","id":1039761120,"node_id":"PR_kwDODunzps4t3Dag","number":3183,"title":"Add missing docstring to DownloadConfig","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635526595000,"updated_at":1635848738000,"closed_at":1635848737000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Document the `use_etag` and `num_proc` attributes in `DownloadConig`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3183\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3183\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3183","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3183","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3183.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3183.patch","merged_at":1635848737000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3182","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3182\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3182\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3182\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3182","id":1039739606,"node_id":"PR_kwDODunzps4t2-9J","number":3182,"title":"Don't memoize strings when hashing since two identical strings may have different python ids","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["This change slows down the hash computation a little bit but from my tests it doesn't look too impactful. 
So I think it's fine to merge this."],"created_at":1635524777000,"updated_at":1635845738000,"closed_at":1635845737000,"author_association":"MEMBER","active_lock_reason":null,"body":"When hashing an object that contains the same string several times, the hash could differ depending on whether the identical strings share the same python `id()`.\r\n\r\nHere is example code that shows how the issue can affect the caching:\r\n```python\r\nimport json\r\nimport pyarrow as pa\r\nfrom datasets.features import Features\r\nfrom datasets.fingerprint import Hasher\r\n\r\nschema = pa.schema([pa.field(\"some_string\", pa.string()), pa.field(\"another_string\", pa.string())])\r\nfeatures_from_schema = Features.from_arrow_schema(schema)\r\nHasher.hash(features_from_schema) # dffa9dca9a73fd8c\r\n\r\nfeatures_dict = json.loads('{\"some_string\": {\"dtype\": \"string\", \"id\": null, \"_type\": \"Value\"}, \"another_string\": {\"dtype\": \"string\", \"id\": null, \"_type\": \"Value\"}}')\r\nfeatures_from_json = Features.from_dict(features_dict)\r\nHasher.hash(features_from_json) # 3812e76b15e6420e\r\n\r\nfeatures_from_schema == features_from_json # True\r\n```\r\n\r\nThis is because in `features_dict`, some strings like \"dtype\" are repeated but don't share the same id, contrary to the ones in `features_from_schema`.\r\n\r\nI fixed that by disabling memoization for strings.\r\n\r\nThis could be optimized in the future by implementing smarter memoization with special handling for strings.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3182\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3182\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3182","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3182","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3182.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3182.patch","merged_at":1635845737000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3181","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3181\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3181\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3181\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3181","id":1039682097,"node_id":"I_kwDODunzps49-Eox","number":3181,"title":"`None` converted to `\"None\"` when loading a dataset","user":{"login":"eladsegal","id":13485709,"node_id":"MDQ6VXNlcjEzNDg1NzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13485709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eladsegal","html_url":"https:\/\/github.com\/eladsegal","followers_url":"https:\/\/api.github.com\/users\/eladsegal\/followers","following_url":"https:\/\/api.github.com\/users\/eladsegal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eladsegal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eladsegal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eladsegal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eladsegal\/orgs","repos_url":"https:\/\/api.github.com\/users\/eladsegal\/repos","events_url":"https:\/\/api.github.com\/users\/eladsegal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eladsegal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi @eladsegal, thanks for reporting.\r\n\r\n@mariosasko I saw you are already working on this, but maybe my comment will be useful to you.\r\n\r\nAll values are casted to their corresponding feature type (including `None` values). For example if the feature type is `Value(\"bool\")`, `None` is casted to `False`.\r\n\r\nIt is true that strings were an exception, but this was recently fixed by @lhoestq (see #3158).","Thanks for reporting.\r\n\r\nThis is actually a breaking change that I think can cause issues when users preprocess their data. String columns used to be nullable. Maybe we can correct https:\/\/github.com\/huggingface\/datasets\/pull\/3158 to keep the None values and avoid this breaking change ?\r\n\r\nEDIT: the other types (bool, int, etc) can also become nullable IMO","So what would be the best way to handle a feature that can have a null value in some of the instances? 
So far I used `None`.\r\nUsing the empty string won't be a good option, as it can be an actual value in the data and is not the same as not having a value at all.","Hi @eladsegal,\r\n\r\nUse `None`. As @albertvillanova correctly pointed out, this change in conversion was introduced (by mistake) in #3158. To avoid it, install the earlier revision with:\r\n```\r\npip install git+https:\/\/github.com\/huggingface\/datasets.git@8107844ec0e7add005db0585c772ee20adc01a5e\r\n```\r\n\r\nI'm making all the feature types nullable as we speak, and the fix will be merged probably early next week.","Hi @mariosasko, is there an estimation as to when this issue will be fixed?","https:\/\/github.com\/huggingface\/datasets\/pull\/3195 fixed it, we'll do a new release soon :)\r\n\r\nFor now feel free to install `datasets` from the master branch","Thanks, but unfortunately it looks like it isn't fixed yet \ud83d\ude22 \r\n[notebook for 1.14.0](https:\/\/colab.research.google.com\/drive\/1SV3sFXPJMWSQgbm4pr9Y1Q8OJ4JYKcDo?usp=sharing)\r\n[notebook for master](https:\/\/colab.research.google.com\/drive\/145wDpuO74MmsuI0SVLcI1IswG6aHpyhi?usp=sharing)","Oh, sorry. I deleted the fix by accident when I was resolving a merge conflict. Let me fix this real quick.","Thank you, it works! \ud83c\udf8a "],"created_at":1635521033000,"updated_at":1639185400000,"closed_at":1639060017000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\nWhen loading a dataset, `None` values of the type `NoneType` are converted to `'None'` of the type `str`.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nqasper = load_dataset(\"qasper\", split=\"train\", download_mode=\"reuse_cache_if_exists\")\r\nprint(qasper[60][\"full_text\"][\"section_name\"])\r\n```\r\n\r\nWhen installing version 1.14.0, the output is\r\n`[None, 'Introduction', 'Benchmark Datasets', ...]`\r\n\r\nWhen installing from the master branch, the output is\r\n`['None', 'Introduction', 'Benchmark Datasets', ...]`\r\n\r\nNotice how the first element was changed from `NoneType` to `str`.\r\n\r\n## Expected results\r\n`None` should stay as is.\r\n\r\n## Actual results\r\n`None` is converted to a string.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: master\r\n- Platform: Linux-4.4.0-19041-Microsoft-x86_64-with-glibc2.17\r\n- Python version: 3.8.10\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3181\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3181\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3180","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3180\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3180\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3180\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3180","id":1039641316,"node_id":"PR_kwDODunzps4t2qQn","number":3180,"title":"fix label mapping","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["heck, test failings. moving to draft. will come back to this later today hopefully","Thanks for fixing this :)\r\nI just updated the dataset_infos.json and added the missing `pretty_name` tag to the dataset card","thank you @lhoestq! running around as always it felt through as a lower priority..."],"created_at":1635518544000,"updated_at":1635860467000,"closed_at":1635849432000,"author_association":"MEMBER","active_lock_reason":null,"body":"Fixing label mapping for hlgd.\r\n0 correponds to same event and 1 corresponds to different event\r\n
\r\n
\r\nnt","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3180\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3180\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3180","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3180","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3180.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3180.patch","merged_at":1635849432000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3179","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3179\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3179\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3179\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3179","id":1039571928,"node_id":"I_kwDODunzps499pvY","number":3179,"title":"Cannot load dataset when the config name is \"special\"","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["The issue is that the datasets are malformed. Not a bug with the datasets library"],"created_at":1635514247000,"updated_at":1635514521000,"closed_at":1635514521000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\n\r\nAfter https:\/\/github.com\/huggingface\/datasets\/pull\/3159, we can get the config name of \"Check\/region_1\", which is \"Check___region_1\".\r\n\r\nBut now we cannot load the dataset (not sure it's related to the above PR though). 
It's the case for all the similar datasets, listed in https:\/\/github.com\/huggingface\/datasets-preview-backend\/issues\/78\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n>>> from datasets import get_dataset_config_names\r\n>>> get_dataset_config_names(\"Check\/region_1\")\r\n['Check___region_1']\r\n>>> load_dataset(\"Check\/region_1\")\r\nUsing custom data configuration Check___region_1-d2b3bc48f11c9be2\r\nDownloading and preparing dataset json\/Check___region_1 to \/home\/slesage\/.cache\/huggingface\/datasets\/json\/Check___region_1-d2b3bc48f11c9be2\/0.0.0\/c2d554c3377ea79c7664b93dc65d0803b45e3279000f993c7bfd18937fd7f426...\r\n100%|██████████| 1\/1 [00:00<00:00, 4443.12it\/s]\r\n100%|██████████| 1\/1 [00:00<00:00, 
1277.19it\/s]\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 697, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 1159, in _prepare_split\r\n writer.write_table(table)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/arrow_writer.py\", line 442, in write_table\r\n pa_table = pa.Table.from_arrays([pa_table[name] for name in self._schema.names], schema=self._schema)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/arrow_writer.py\", line 442, in \r\n pa_table = pa.Table.from_arrays([pa_table[name] for name in self._schema.names], schema=self._schema)\r\n File \"pyarrow\/table.pxi\", line 1249, in pyarrow.lib.Table.__getitem__\r\n File \"pyarrow\/table.pxi\", line 1825, in pyarrow.lib.Table.column\r\n File \"pyarrow\/table.pxi\", line 1800, in pyarrow.lib.Table._ensure_integer_index\r\nKeyError: 'Field \"builder_name\" does not exist in table schema'\r\n```\r\n\r\nLoading in streaming mode also returns something strange:\r\n\r\n```python\r\n>>> list(load_dataset(\"Check\/region_1\", streaming=True, split=\"train\"))\r\nUsing custom data configuration Check___region_1-d2b3bc48f11c9be2\r\n[{'builder_name': None, 'citation': '', 'config_name': None, 'dataset_size': None, 'description': '', 'download_checksums': None, 'download_size': None, 'features': {'speech': {'feature': {'dtype': 'float64', 'id': None, '_type': 'Value'}, 'length': -1, 'id': None, '_type': 'Sequence'}, 'sampling_rate': {'dtype': 'int64', 'id': None, '_type': 'Value'}, 'label': {'dtype': 'string', 'id': None, '_type': 'Value'}}, 'homepage': '', 'license': '', 'post_processed': None, 'post_processing_size': None, 'size_in_bytes': None, 'splits': None, 'supervised_keys': None, 'task_templates': None, 'version': None}, {'_data_files': [{'filename': 'dataset.arrow'}], '_fingerprint': 'f1702bb5533c549c', '_format_columns': ['speech', 'sampling_rate', 'label'], '_format_kwargs': {}, '_format_type': None, '_indexes': {}, '_indices_data_files': None, '_output_all_columns': False, '_split': None}]\r\n```\r\n\r\n## Expected results\r\n\r\nThe dataset should be loaded\r\n\r\n## Actual results\r\n\r\nAn error occurs\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.14.1.dev0\r\n- Platform: Linux-5.11.0-1020-aws-x86_64-with-glibc2.31\r\n- Python version: 3.9.6\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3179\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3179\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3178","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3178\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3178\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3178\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3178","id":1039539076,"node_id":"I_kwDODunzps499huE","number":3178,"title":"\"Property couldn't be hashed properly\" even though fully picklable","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["After some digging, I found that this is caused by `dill` and using `recurse=True)` when trying to dump the object. The problem also occurs without multiprocessing. I can only find [the following information](https:\/\/dill.readthedocs.io\/en\/latest\/dill.html#dill._dill.dumps) about this:\r\n\r\n> If recurse=True, then objects referred to in the global dictionary are recursively traced and pickled, instead of the default behavior of attempting to store the entire global dictionary. This is needed for functions defined via exec().\r\n\r\nIn the utils, this is explicitly enabled\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/df63614223bf1dd1feb267d39d741bada613352c\/src\/datasets\/utils\/py_utils.py#L327-L330\r\n\r\nIs this really necessary? Is there a way around it? 
Also pinging the spaCy team in case this is easy to solve on their end. (I hope so.)","Hi ! Thanks for reporting\r\n\r\nYes `recurse=True` is necessary to be able to hash all the objects that are passed to the `map` function\r\n\r\nEDIT: hopefully this object can be serializable soon, but otherwise we can consider adding more control to the user on how to hash objects that are not serializable (as mentioned in https:\/\/github.com\/huggingface\/datasets\/issues\/3044#issuecomment-948818210)","I submitted a PR to spacy that should fix this issue (linked above). I'll leave this open until that PR is merged. ","@lhoestq After some testing I find that even with the updated spaCy, no cache files are used. I do not get any warnings though, but I can see that map is run every time I run the code. Do you have thoughts about why? If you want to try the tests below, make sure to install spaCy from [here](https:\/\/github.com\/BramVanroy\/spaCy) and installing the base model with `python -m spacy download en_core_web_sm`.\r\n\r\n```python\r\nfrom functools import partial\r\nfrom pathlib import Path\r\n\r\nimport spacy\r\nfrom datasets import Dataset\r\nimport datasets\r\ndatasets.logging.set_verbosity_debug()\r\n\r\ndef tokenize(nlp, l):\r\n return {\"tok\": [t.text for t in nlp(l[\"text\"])]}\r\n\r\ndef main():\r\n fin = r\"some\/file\/with\/many\/lines\"\r\n lines = Path(fin).read_text(encoding=\"utf-8\").splitlines()\r\n nlp = spacy.load(\"en_core_web_sm\")\r\n ds = Dataset.from_dict({\"text\": lines, \"text_id\": list(range(len(lines)))})\r\n tok = partial(tokenize, nlp)\r\n ds = ds.map(tok, load_from_cache_file=True)\r\n print(ds[0:2])\r\n\r\nif __name__ == '__main__':\r\n main()\r\n```\r\n\r\n... or with load_dataset (here I get the message that `load_dataset` can reuse the dataset, but still I see all samples being processed via the tqdm progressbar):\r\n\r\n```python\r\nfrom functools import partial\r\n\r\nimport spacy\r\nfrom datasets import load_dataset\r\nimport datasets\r\ndatasets.logging.set_verbosity_debug()\r\n\r\ndef tokenize(nlp, sample):\r\n return {\"tok\": [t.text for t in nlp(sample[\"text\"])]}\r\n\r\ndef main():\r\n fin = r\"some\/file\/with\/many\/lines\"\r\n nlp = spacy.load(\"en_core_web_sm\")\r\n tok_func = partial(tokenize, nlp)\r\n ds = load_dataset('text', data_files=fin)\r\n ds = ds[\"train\"].map(tok_func)\r\n print(ds[0:2])\r\n\r\nif __name__ == '__main__':\r\n main()\r\n```","It looks like every time you load `en_core_web_sm` you get a different python object:\r\n```python\r\nimport spacy\r\nfrom datasets.fingerprint import Hasher\r\n\r\nnlp1 = spacy.load(\"en_core_web_sm\")\r\nnlp2 = spacy.load(\"en_core_web_sm\")\r\nHasher.hash(nlp1), Hasher.hash(nlp2)\r\n# ('f6196a33882fea3b', 'a4c676a071f266ff')\r\n```\r\nHere is a list of attributes that have different hashes for `nlp1` and `nlp2`:\r\n- tagger\r\n- parser\r\n- entity\r\n- pipeline (it's the list of the three attributes above)\r\n\r\nI just took a look at the tagger for example and I found subtle differences (there may be other differences though):\r\n```python\r\nnlp1.tagger.model.tok2vec.embed.id, nlp2.tagger.model.tok2vec.embed.id\r\n# (1721, 2243)\r\n```\r\n\r\nWe can try to find all the differences and find the best way to hash those objects properly","Thanks for searching! I went looking, and found that this is an implementation detail of thinc\r\n\r\nhttps:\/\/github.com\/explosion\/thinc\/blob\/68691e303ae68cae4bc803299016f1fc064328bf\/thinc\/model.py#L96-L98\r\n\r\nPresumably (?) 
exactly to distinguish between different parts in memory when multiple models are loaded. Do not think that this can be changed on their end - but I will ask what exactly it is for (I'm curious).\r\n\r\nDo you think it is overkill to write something into the hasher explicitly to deal with spaCy models? It seems like something that is beneficial to many, but I do not know if you are open to adding third-party-specific ways to deal with this. If you are, I can have a look for this specific case how we can ignore `thinc.Model.id` from the hasher.","It can be even simpler to hash the bytes of the pipeline instead\r\n```python\r\nnlp1.to_bytes() == nlp2.to_bytes() # True\r\n```\r\n\r\nIMO we should integrate the custom hashing for spacy models into `datasets` (we use a custom Pickler for that).\r\nWhat could be done on Spacy's side instead (if they think it's nice to have) is to implement a custom pickling for these classes using `to_bytes`\/`from_bytes` to have deterministic pickle dumps.\r\n\r\nFinally I think it would be nice in the future to add an API to let `datasets` users control this kind of things. Something like being able to define your own hashing if you use complex objects.\r\n```python\r\n@datasets.register_hash(spacy.language.Language)\r\ndef hash_spacy_language(nlp):\r\n return Hasher.hash(nlp.to_bytes())\r\n```","I do not quite understand what you mean. as far as I can tell, using `to_bytes` does a pickle dump behind the scene (with `srsly`), recursively using `to_bytes` on the required objects. Therefore, the result of `to_bytes` is a deterministic pickle dump AFAICT. Or do you mean that you wish that using your own pickler and running `dumps(nlp)` should also be deterministic? I guess that would require `__setstate__` and `__getstate__` methods on all the objects that have to\/from_bytes. I'll have a listen over at spaCy what they think, and if that would solve the issue. I'll try this locally first, if I find the time.\r\n\r\nI agree that having the option to use a custom hasher would be useful. I like your suggestion!\r\n\r\nEDIT: after trying some things and reading through their API, it seems that they explicitly do not want this. https:\/\/spacy.io\/usage\/saving-loading#pipeline\r\n\r\n> When serializing the pipeline, keep in mind that this will only save out the binary data for the individual components to allow spaCy to restore them \u2013 not the entire objects. This is a good thing, because it makes serialization safe. But it also means that you have to take care of storing the config, which contains the pipeline configuration and all the relevant settings.\r\n\r\nBest way forward therefore seems to implement the ability to specify a hasher depending on the objects that are pickled, as you suggested. I can work on this if that is useful. I could use some pointers as to how you would like to implement the `register_hash` functionality though. I assume using `catalogue` over at Explosion might be a good starting point.\r\n\r\n","Interestingly, my PR does not solve the issue discussed above. The `tokenize` function hash is different on every run, because for some reason `nlp.__call__` has a different hash every time. The issue therefore seems to run much deeper than I thought. 
If you have any ideas, I'm all ears.\r\n\r\n```shell\r\ngit clone https:\/\/github.com\/explosion\/spaCy.git\r\ncd spaCy\/\r\ngit checkout cab9209c3dfcd1b75dfe5657f10e52c4d847a3cf\r\ncd ..\r\n\r\ngit clone https:\/\/github.com\/BramVanroy\/datasets.git\r\ncd datasets\r\ngit checkout registry\r\npip install -e .\r\npip install ..\/spaCy\r\nspacy download en_core_web_sm\r\n```\r\n\r\n```python\r\nimport spacy\r\n\r\nfrom datasets import load_dataset\r\nfrom datasets.fingerprint import Hasher\r\nfrom datasets.utils.registry import hashers\r\n\r\n@hashers.register(spacy.Language)\r\ndef hash_spacy_language(nlp):\r\n return Hasher.hash(nlp.to_bytes())\r\n\r\ndef main():\r\n fin = r\"your\/large\/file\"\r\n nlp = spacy.load(\"en_core_web_sm\")\r\n # This is now always the same yay!\r\n print(Hasher.hash(nlp))\r\n\r\n def tokenize(l):\r\n return {\"tok\": [t.text for t in nlp(l[\"text\"])]}\r\n\r\n ds = load_dataset(\"text\", data_files=fin)\r\n # But this is not...\r\n print(Hasher.hash(tokenize))\r\n # ... because of this\r\n print(Hasher.hash(nlp.__call__))\r\n ds = ds[\"train\"].map(tokenize)\r\n print(ds[0:2])\r\n\r\n\r\nif __name__ == '__main__':\r\n main()\r\n```","Hi ! I just answered in your PR :) In order for your custom hashing to be used for nested objects, you must integrate it into our recursive pickler that we use for hashing.","I don't quite understand the design constraints of `datasets` or the script that you're running, but my usual advice is to avoid using pickle unless you _absolutely_ have to. So for instance instead of doing your `partial` over the `nlp` object itself, can you just pass the string `en_core_web_sm` in? This will mean calling `spacy.load()` inside the work function, but this is no worse than having to call `pickle.load()` on the contents of the NLP object anyway -- in fact you'll generally find `spacy.load()` faster, apart from the disk read.\r\n\r\nIf you need to pass in the bytes data and don't want to read from disk, you could do something like this:\r\n\r\n```\r\nmsg = (nlp.lang, nlp.to_bytes())\r\n\r\ndef unpack(lang, bytes_data):\r\n return spacy.blank(lang).from_bytes(bytes_data)\r\n```\r\n\r\nI think that should probably work: the Thinc `model.to_dict()` method (which is used by the `model.to_bytes()` method) doesn't pack the model's ID into the message, so the `nlp.to_bytes()` that you get shouldn't be affected by the global IDs. So you should get a clean message from `nlp.to_bytes()` that doesn't depend on the global state.","Hi Matthew, thanks for chiming in! We are currently implementing exactly what you suggest: `to_bytes()` as a default before pickling - but we may prefer `to_dict` to avoid double dumping.\r\n\r\n`datasets` uses pickle dumps (actually dill) to get unique representations of processing steps (a \"fingerprint\" or hash). So it never needs to re-load that dump - it just needs its value to create a hash. If a fingerprint is identical to a cached fingerprint, then the result can be retrieved from the on-disk cache. (@lhoestq or @mariosasko can correct me if I'm wrong.)\r\n\r\nI was experiencing the issue that parsing with spaCy gave me a different fingerprint on every run of the script and thus it could never load the processed dataset from cache. At first I thought the reason was that spaCy Language objects were not picklable with recursive dill, but even after [adjusting for that](https:\/\/github.com\/explosion\/spaCy\/pull\/9593) the issue persisted. 
@lhoestq found that this is due to the changing `id`, which you discussed [here](https:\/\/github.com\/explosion\/spaCy\/discussions\/9609#discussioncomment-1661081). So yes, you are right. On the surface there simply seems to be an incompatibility between `datasets` default caching functionality as it is currently implemented and `spacy.Language`.\r\n\r\nThe [linked PR](https:\/\/github.com\/huggingface\/datasets\/pull\/3224) aims to remedy that, though. Up to now I have put some effort into making it easier to define your own \"pickling\" function for a given type (and optionally any of its subclasses). That allows us to tell `datasets` that instead of doing `dill.save(nlp)` (non-deterministic), to use `dill.save(nlp.to_bytes())` (deterministic). When I find some more time, the PR [will be expanded](https:\/\/github.com\/huggingface\/datasets\/pull\/3224#issuecomment-968958528) to improve the user-experience a bit and add a built-in function to pickle `spacy.Language` as one of the defaults (using `to_bytes()`)."],"created_at":1635512169000,"updated_at":1637228011000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\nI am trying to tokenize a dataset with spaCy. I found that no matter what I do, the spaCy language object (`nlp`) prevents `datasets` from pickling correctly - or so the warning says - even though manually pickling is no issue. It should not be an issue either, since spaCy objects are picklable.\r\n\r\n## Steps to reproduce the bug\r\n\r\nHere is a [colab](https:\/\/colab.research.google.com\/drive\/1gt75LCBIzsmBMvvipEOvWulvyZseBiA7?usp=sharing) but for some reason I cannot reproduce it there. That may have to do with logging\/tqdm on Colab, or with running things in notebooks. 
I tried below code on Windows and Ubuntu as a Python script and getting the same issue (warning below).\r\n\r\n```python\r\nimport pickle\r\n\r\nfrom datasets import load_dataset\r\nimport spacy\r\n\r\n\r\nclass Processor:\r\n def __init__(self):\r\n self.nlp = spacy.load(\"en_core_web_sm\", disable=[\"tagger\", \"parser\", \"ner\", \"lemmatizer\"])\r\n\r\n @staticmethod\r\n def collate(batch):\r\n return [d[\"en\"] for d in batch]\r\n\r\n def parse(self, batch):\r\n batch = batch[\"translation\"]\r\n return {\"translation_tok\": [{\"en_tok\": \" \".join([t.text for t in doc])} for doc in self.nlp.pipe(self.collate(batch))]}\r\n\r\n def process(self):\r\n ds = load_dataset(\"wmt16\", \"de-en\", split=\"train[:10%]\")\r\n ds = ds.map(self.parse, batched=True, num_proc=6)\r\n\r\n\r\nif __name__ == '__main__':\r\n pr = Processor()\r\n\r\n # succeeds\r\n with open(\"temp.pkl\", \"wb\") as f:\r\n pickle.dump(pr, f)\r\n print(\"Successfully pickled!\")\r\n\r\n pr.process()\r\n\r\n```\r\n\r\n---\r\n\r\nHere is a small change that includes `Hasher.hash` that shows that the hasher cannot seem to successfully pickle parts form the NLP object.\r\n\r\n```python\r\n\r\nfrom datasets.fingerprint import Hasher\r\nimport pickle\r\n\r\nfrom datasets import load_dataset\r\nimport spacy\r\n\r\n\r\nclass Processor:\r\n def __init__(self):\r\n self.nlp = spacy.load(\"en_core_web_sm\", disable=[\"tagger\", \"parser\", \"ner\", \"lemmatizer\"])\r\n\r\n @staticmethod\r\n def collate(batch):\r\n return [d[\"en\"] for d in batch]\r\n\r\n def parse(self, batch):\r\n batch = batch[\"translation\"]\r\n return {\"translation_tok\": [{\"en_tok\": \" \".join([t.text for t in doc])} for doc in self.nlp.pipe(self.collate(batch))]}\r\n\r\n def process(self):\r\n ds = load_dataset(\"wmt16\", \"de-en\", split=\"train[:10]\")\r\n return ds.map(self.parse, batched=True)\r\n\r\n\r\nif __name__ == '__main__':\r\n pr = Processor()\r\n\r\n # succeeds\r\n with open(\"temp.pkl\", \"wb\") as f:\r\n pickle.dump(pr, f)\r\n print(\"Successfully pickled class instance!\")\r\n\r\n # succeeds\r\n with open(\"temp.pkl\", \"wb\") as f:\r\n pickle.dump(pr.nlp, f)\r\n print(\"Successfully pickled nlp!\")\r\n\r\n # fails\r\n print(Hasher.hash(pr.nlp))\r\n pr.process()\r\n```\r\n\r\n## Expected results\r\nThis to be picklable, working (fingerprinted), and no warning.\r\n\r\n## Actual results\r\nIn the first snippet, I get this warning \r\nParameter 'function'= of the transform datasets.arrow_dataset.Dataset._map_single couldn't be hashed properly, a random hash was used instead. Make sure your transforms and parameters are serializable with pickle or dill for the dataset fingerprinting and caching to work. If you reuse this transform, the caching mechanism will consider it to be different from the previous calls and recompute everything. This warning is only showed once. 
Subsequent hashing failures won't be shown.\r\n\r\nIn the second, I get this traceback which points to the `Hasher.hash` line.\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 918, in save_global\r\n obj2, parent = _getattribute(module, name)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 266, in _getattribute\r\n .format(name, obj))\r\nAttributeError: Can't get local attribute 'add_codes.<locals>.ErrorsWithCodes' on \r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \" scratch_4.py\", line 40, in <module>\r\n print(Hasher.hash(pr.nlp))\r\n File \" \\lib\\site-packages\\datasets\\fingerprint.py\", line 191, in hash\r\n return cls.hash_default(value)\r\n File \" \\lib\\site-packages\\datasets\\fingerprint.py\", line 184, in hash_default\r\n return cls.hash_bytes(dumps(value))\r\n File \" \\lib\\site-packages\\datasets\\utils\\py_utils.py\", line 345, in dumps\r\n dump(obj, file)\r\n File \" \\lib\\site-packages\\datasets\\utils\\py_utils.py\", line 320, in dump\r\n Pickler(file, recurse=True).dump(obj)\r\n File \" \\lib\\site-packages\\dill\\_dill.py\", line 498, in dump\r\n StockPickler.dump(self, obj)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 409, in dump\r\n self.save(obj)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 521, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 634, in save_reduce\r\n save(state)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\lib\\site-packages\\dill\\_dill.py\", line 990, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 821, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 847, in _batch_setitems\r\n save(v)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 781, in save_list\r\n self._batch_appends(obj)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 805, in _batch_appends\r\n save(x)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 736, in save_tuple\r\n save(element)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 521, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 634, in save_reduce\r\n save(state)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 736, in save_tuple\r\n save(element)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\lib\\site-packages\\dill\\_dill.py\", line 990, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 821, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 847, in _batch_setitems\r\n save(v)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\lib\\site-packages\\dill\\_dill.py\", line 1176, in save_instancemethod0\r\n pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 610, in save_reduce\r\n save(args)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 736, in save_tuple\r\n save(element)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\lib\\site-packages\\datasets\\utils\\py_utils.py\", line 523, in save_function\r\n obj=obj,\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 610, in save_reduce\r\n save(args)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 751, in save_tuple\r\n save(element)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\lib\\site-packages\\dill\\_dill.py\", line 990, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 821, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 847, in _batch_setitems\r\n save(v)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 521, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 605, in save_reduce\r\n save(cls)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\lib\\site-packages\\dill\\_dill.py\", line 1439, in save_type\r\n StockPickler.save_global(pickler, obj, name=name)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 922, in save_global\r\n (obj, module_name, name))\r\n_pickle.PicklingError: Can't pickle <class 'spacy.errors.add_codes.<locals>.ErrorsWithCodes'>: it's not found as spacy.errors.add_codes.<locals>.ErrorsWithCodes\r\n```\r\n\r\n## Environment info\r\nTried on both Linux and Windows\r\n\r\n- `datasets` version: 1.14.0\r\n- Platform: Windows-10-10.0.19041-SP0 + Python 3.7.9; Linux-5.11.0-38-generic-x86_64-with-Ubuntu-20.04-focal + Python 3.7.12\r\n- PyArrow version: 6.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3178\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3178\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
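A minimal sketch of one way to work around the unpicklable spaCy pipeline during fingerprinting, assuming a locally installed `en_core_web_sm` model. `new_fingerprint` is an existing `Dataset.map` parameter; supplying it by hand is a workaround rather than the library's prescribed fix, since the hasher then never has to pickle the closure over `nlp`:

```python
import datasets
import spacy

nlp = spacy.load("en_core_web_sm")  # assumption: this model is installed locally

def tokenize(batch):
    # The closure over `nlp` is what the Hasher fails to pickle:
    # spaCy's ErrorsWithCodes is a class defined inside add_codes().
    return {"tokens": [[tok.text for tok in doc] for doc in nlp.pipe(batch["text"])]}

ds = datasets.Dataset.from_dict({"text": ["NLP is fun", "so is caching"]})
# Supplying a fingerprint manually skips hashing the function; bump the
# suffix whenever the preprocessing logic changes, or the cache goes stale.
ds = ds.map(tokenize, batched=True, new_fingerprint="spacy-tokenize-v1")
```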
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3177","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3177\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3177\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3177\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3177","id":1039487780,"node_id":"I_kwDODunzps499VMk","number":3177,"title":"More control over TQDM when using map\/filter with multiple processes","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi,\r\n\r\nIt's hard to provide an API that would cover all use-cases with tqdm in this project.\r\n\r\nHowever, you can make it work by defining a custom decorator (a bit hacky tho) as follows:\r\n```python\r\nimport datasets\r\n\r\ndef progress_only_on_rank_0(func):\r\n def wrapper(*args, **kwargs):\r\n rank = kwargs.get(\"rank\")\r\n disable_tqdm = kwargs.get(\"disable_tqdm\", False)\r\n disable_tqdm = True if rank is not None and rank > 0 else disable_tqdm\r\n kwargs[\"disable_tqdm\"] = disable_tqdm\r\n return func(*args, **kwargs)\r\n return wrapper\r\n \r\ndatasets.Dataset._map_single = progress_only_on_rank_0(datasets.Dataset._map_single)\r\n``` \r\n\r\nEDIT: Ups, closed by accident.\r\n\r\nThanks for the provided links. 
`Trainer` requires this for training in a multi-node distributed setting. However, `Dataset.map` doesn't support that yet.\r\n\r\nDo you have an API for this in mind? `Dataset.map` is already bloated with arguments, so IMO it's not a good idea to add a new arg there.\r\n\r\n","Inspiration may be found in `transformers`.\r\n\r\nhttps:\/\/github.com\/huggingface\/transformers\/blob\/4a394cf53f05e73ab9bbb4b179a40236a5ffe45a\/src\/transformers\/trainer.py#L1231-L1233\r\n\r\nTo get unique IDs for each worker, see https:\/\/stackoverflow.com\/a\/10192611\/1150683"],"created_at":1635508576000,"updated_at":1635853130000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"It would help with the clutter in my terminal if tqdm is only shown for rank 0 when using `num_proc>1` in the map and filter methods of datasets.\r\n\r\n```python\r\ndataset.map(lambda examples: tokenize(examples[\"text\"]), batched=True, num_proc=6)\r\n```\r\n\r\nThe above snippet leads to a lot of TQDM bars, and depending on your terminal, these will not overwrite but keep pushing each other down.\r\n\r\n```\r\n #0: 0%| | 0\/13 [00:00, ?ba\/s]\r\n #1: 0%| | 0\/13 [00:00, ?ba\/s]\r\n #2: 0%| | 0\/13 [00:00, ?ba\/s]\r\n #3: 0%| | 0\/13 [00:00, ?ba\/s]\r\n #4: 0%| | 0\/13 [00:00, ?ba\/s]\r\n #5: 0%| | 0\/13 [00:00, ?ba\/s]\r\n #0: 8%| | 1\/13 [00:00, ?ba\/s]\r\n #1: 8%| | 1\/13 [00:00, ?ba\/s]\r\n...\r\n```\r\n\r\nInstead, it would be welcome if we had the option to only show the progress of rank 0.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3177\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3177\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
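For reference, the monkey-patch suggested in the thread above can be applied as follows. This is a sketch that relies on the private `_map_single` internals of `datasets` 1.14 and may break in later versions:

```python
import datasets

def progress_only_on_rank_0(func):
    # `rank` is the worker index that Dataset.map passes to _map_single
    # when num_proc > 1; it is None in the single-process case.
    def wrapper(*args, **kwargs):
        rank = kwargs.get("rank")
        disable = kwargs.get("disable_tqdm", False)
        kwargs["disable_tqdm"] = (rank is not None and rank > 0) or disable
        return func(*args, **kwargs)
    return wrapper

datasets.Dataset._map_single = progress_only_on_rank_0(datasets.Dataset._map_single)

# After the patch, only worker #0 renders a progress bar:
ds = datasets.Dataset.from_dict({"text": ["a"] * 1000})
ds = ds.map(lambda examples: examples, batched=True, num_proc=4)
```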
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3176","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3176\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3176\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3176\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3176","id":1039068312,"node_id":"PR_kwDODunzps4t00xS","number":3176,"title":"OpenSLR dataset: update generate_examples to properly extract data for SLR83","user":{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Also fix #3125."],"created_at":1635469167000,"updated_at":1636042845000,"closed_at":1635501849000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Fixed #3168.\r\n\r\nThe SLR38 indices are CSV files and there wasn't any code in openslr.py to process these files properly. 
The end result was an empty table.\r\n\r\nI've added code to properly process these CSV files.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3176\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3176\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3176","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3176","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3176.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3176.patch","merged_at":1635501849000},"is_pull_request":true}
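A rough sketch of the kind of change described in this PR, with an assumed index layout of `speaker_id, filename, transcript` per CSV row (the real column layout may differ). The point is that the SLR83 index must go through a CSV parser instead of whitespace splitting:

```python
import csv
import os

def generate_slr83_examples(index_path, audio_dir):
    # Hypothetical layout per row: speaker_id, audio filename, transcript.
    with open(index_path, encoding="utf-8") as f:
        for key, row in enumerate(csv.reader(f)):
            speaker_id, filename, sentence = (field.strip() for field in row[:3])
            yield key, {
                "path": os.path.join(audio_dir, filename + ".wav"),
                "sentence": sentence,
            }
```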
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3175","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3175\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3175\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3175\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3175","id":1038945271,"node_id":"PR_kwDODunzps4t0bXw","number":3175,"title":"Add docs for `to_tf_dataset`","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["This looks great, thank you!","Thanks !\r\n\r\nFor some reason the new GIF is 6MB, which is a bit heavy for an image on a website. The previous one was around 200KB though which is perfect. For a good experience we usually expect images to be less than 500KB - otherwise for users with poor connection it takes too long to load. Could you try to reduce its size ? 
Then I think we can merge :)"],"created_at":1635454522000,"updated_at":1635953976000,"closed_at":1635934043000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"This PR adds some documentation for new features released in v1.13.0, with the main addition being `to_tf_dataset`:\r\n\r\n- Show how to use `to_tf_dataset` in the tutorial, and move `set_format(type='tensorflow'...)` to the Process section (let me know if I'm missing anything @Rocketknight1 \ud83d\ude05).\r\n- Add an example for loading a dataset from multiple zipped CSV files to the Load section.\r\n- Add an example for removing columns from an `IterableDataset`.\r\n- Add a graphic for visualizing streaming.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3175\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3175\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3175","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3175","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3175.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3175.patch","merged_at":1635934043000},"is_pull_request":true}
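Roughly the pattern those docs cover; exact signatures have shifted across `datasets`/`transformers` versions, so treat this as a sketch. `map` tokenizes, then `to_tf_dataset` wraps the arrow table in a batched `tf.data.Dataset`:

```python
from datasets import load_dataset
from transformers import AutoTokenizer, DefaultDataCollator

ds = load_dataset("glue", "sst2", split="train")
tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
ds = ds.map(
    lambda ex: tokenizer(ex["sentence"], truncation=True, padding="max_length"),
    batched=True,
)

tf_ds = ds.to_tf_dataset(
    columns=["input_ids", "attention_mask"],
    label_cols=["label"],
    batch_size=16,
    shuffle=True,
    collate_fn=DefaultDataCollator(return_tensors="tf"),
)
```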
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3174","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3174\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3174\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3174\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3174","id":1038427245,"node_id":"PR_kwDODunzps4tyuQ_","number":3174,"title":"Asserts replaced by exceptions (huggingface#3171)","user":{"login":"joseporiolayats","id":5772490,"node_id":"MDQ6VXNlcjU3NzI0OTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5772490?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joseporiolayats","html_url":"https:\/\/github.com\/joseporiolayats","followers_url":"https:\/\/api.github.com\/users\/joseporiolayats\/followers","following_url":"https:\/\/api.github.com\/users\/joseporiolayats\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joseporiolayats\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joseporiolayats\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joseporiolayats\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joseporiolayats\/orgs","repos_url":"https:\/\/api.github.com\/users\/joseporiolayats\/repos","events_url":"https:\/\/api.github.com\/users\/joseporiolayats\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joseporiolayats\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Your first PR went smoothly, well done!\r\nYou are welcome to continue contributing to this project.\r\nGr\u00e0cies, @joseporiolayats! 
\ud83d\ude09 "],"created_at":1635422145000,"updated_at":1636180532000,"closed_at":1635512923000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"I've replaced two asserts with their proper exceptions following the guidelines described in issue #3171 by following the contributing guidelines.\r\n\r\nPS: This is one of my first PRs, hoping I don't break anything!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3174\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3174\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3174","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3174","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3174.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3174.patch","merged_at":1635512923000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3173","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3173\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3173\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3173\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3173","id":1038404300,"node_id":"PR_kwDODunzps4typcA","number":3173,"title":"Fix issue with filelock filename being too long on encrypted filesystems","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635420537000,"updated_at":1635500544000,"closed_at":1635500544000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Infer max filename length in filelock on Unix-like systems. Should fix problems on encrypted filesystems such as eCryptfs.\r\n\r\nFix #2924 \r\n\r\ncc: @lmmx","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3173\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3173\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3173","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3173","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3173.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3173.patch","merged_at":1635500544000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3172","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3172\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3172\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3172\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3172","id":1038351587,"node_id":"I_kwDODunzps494_zj","number":3172,"title":"`SystemError 15` thrown in `Dataset.__del__` when using `Dataset.map()` with `num_proc>1`","user":{"login":"vlievin","id":9859840,"node_id":"MDQ6VXNlcjk4NTk4NDA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9859840?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vlievin","html_url":"https:\/\/github.com\/vlievin","followers_url":"https:\/\/api.github.com\/users\/vlievin\/followers","following_url":"https:\/\/api.github.com\/users\/vlievin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vlievin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vlievin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vlievin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vlievin\/orgs","repos_url":"https:\/\/api.github.com\/users\/vlievin\/repos","events_url":"https:\/\/api.github.com\/users\/vlievin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vlievin\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["NB: even if the error is raised, the dataset is successfully cached. So restarting the script after every `map()` allows to ultimately run the whole preprocessing. But this prevents to realistically run the code over multiple nodes.","Hi,\r\n\r\nIt's not easy to debug the problem without the script. I may be wrong since I'm not very familiar with PyTorch Lightning, but shouldn't you preprocess the data in the `prepare_data` function of `LightningDataModule` and not in the `setup` function.\r\nAs you can't modify the module state in `prepare_data` (according to the docs), use the `cache_file_name` argument in `Dataset.map` there, and reload the processed data in `setup` with `Dataset.from_file(cache_file_name)`. 
If `num_proc>1`, check the docs on the `suffix_template` argument of `Dataset.map` to get an idea what the final `cache_file_names` are going to be.\r\n\r\nLet me know if this helps.","Hi @mariosasko, thank you for the hint, that helped me to move forward with that issue. \r\n\r\nI did a major refactoring of my project to disentangle my `LightningDataModule` and `Dataset`. Just FYI, it looks like:\r\n\r\n```python\r\nclass Builder():\r\n def __call__() -> DatasetDict:\r\n # load and preprocess the data\r\n return dataset\r\n\r\nclass DataModule(LightningDataModule):\r\n def prepare_data():\r\n self.builder()\r\n def setup():\r\n self.dataset = self.builder()\r\n```\r\n\r\nUnfortunately, the entanglement between `LightningDataModule` and `Dataset` was not the issue.\r\n\r\nThe culprit was `hydra` and a slight adjustment of the structure of my project solved this issue. The problematic project structure was:\r\n\r\n```\r\nsrc\/\r\n | - cli.py\r\n | - training\/\r\n | -experiment.py\r\n\r\n# code in experiment.py\r\ndef run_experiment(config):\r\n # preprocess data and run\r\n \r\n# code in cli.py\r\n@hydra.main(...)\r\ndef run(config):\r\n return run_experiment(config)\r\n```\r\n\r\nMoving `run()` from `clip.py` to `training.experiment.py` solved the issue with `SystemError 15`. No idea why. \r\n\r\nEven if the traceback was referring to `Dataset.__del__`, the problem does not seem to be primarily related to `datasets`, so I will close this issue. Thank you for your help!","Please allow me to revive this discussion, as I have an extremely similar issue. Instead of an error, my datasets functions simply aren't caching properly. My setup is almost the same as yours, with hydra to configure my experiment parameters.\r\n\r\n@vlievin Could you confirm if your code correctly loads the cache? If so, do you have any public code that I can reference for comparison?\r\n\r\nI will post a full example with hydra that illustrates this problem in a little bit, probably on another thread."],"created_at":1635416940000,"updated_at":1642251219000,"closed_at":1635938770000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nI use `datasets.map` to preprocess some data in my application. The error `SystemError 15` is thrown at the end of the execution of `Dataset.map()` (only with `num_proc>1`. Traceback included bellow. \r\n \r\nThe exception is raised only when the code runs within a specific context. Despite ~10h spent investigating this issue, I have failed to isolate the bug, so let me describe my setup. \r\n\r\nIn my project, `Dataset` is wrapped into a `LightningDataModule` and the data is preprocessed when calling `LightningDataModule.setup()`. Calling `.setup()` in an isolated script works fine (even when wrapped with `hydra.main()`). However, when calling `.setup()` within the experiment script (depends on `pytorch_lightning`), the script crashes and `SystemError 15`.\r\n\r\nI could avoid throwing this error by modifying ` Dataset.__del__()` (see bellow), but I believe this only moves the problem somewhere else. I am completely stuck with this issue, any hint would be welcome. 
\r\n\r\n```python\r\nclass Dataset()\r\n ...\r\n def __del__(self):\r\n if hasattr(self, \"_data\"):\r\n _ = self._data # <- ugly trick that allows avoiding the issue.\r\n del self._data\r\n if hasattr(self, \"_indices\"):\r\n del self._indices\r\n```\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Unfortunately I couldn't isolate the bug.\r\n```\r\n\r\n## Expected results\r\nCalling `Dataset.map()` without throwing an exception. Or at least raising a more detailed exception\/traceback.\r\n\r\n## Actual results\r\n```\r\nException ignored in: \u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 5\/5 [00:05<00:00, 1.17ba\/s]\r\nTraceback (most recent call last):\r\n File \"...\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 906, in __del__\r\n del self._data\r\n File \"...\/python3.8\/site-packages\/ray\/worker.py\", line 1033, in sigterm_handler\r\n sys.exit(signum)\r\nSystemExit: 15\r\n\r\n```\r\n\r\n## Environment info\r\n\r\nTested on 2 environments:\r\n\r\n**Environment 1.**\r\n- `datasets` version: 1.14.0\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.8\r\n- PyArrow version: 6.0.0\r\n\r\n**Environment 2.**\r\n- `datasets` version: 1.14.0\r\n- Platform: Linux-4.18.0-305.19.1.el8_4.x86_64-x86_64-with-glibc2.28\r\n- Python version: 3.9.7\r\n- PyArrow version: 6.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3172\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3172\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
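A sketch of the `prepare_data`/`setup` split suggested in the thread above, with a hypothetical cache path and preprocessing function: the cache is written once per node, and every process then merely reloads the finished arrow file:

```python
from datasets import Dataset, load_dataset

CACHE_FILE = "/tmp/imdb_processed.arrow"  # hypothetical fixed cache location

def preprocess(batch):
    return {"n_chars": [len(text) for text in batch["text"]]}

class DataModule:  # stand-in for a LightningDataModule
    def prepare_data(self):
        # Runs once per node: materialize the cache, keep no state.
        ds = load_dataset("imdb", split="train")
        ds.map(preprocess, batched=True, cache_file_name=CACHE_FILE)

    def setup(self, stage=None):
        # Runs in every process: reload the already-written arrow file.
        self.dataset = Dataset.from_file(CACHE_FILE)
```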
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3171","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3171\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3171\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3171\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3171","id":1037728059,"node_id":"I_kwDODunzps492nk7","number":3171,"title":"Raise exceptions instead of using assertions for control flow","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Adding the remaining tasks for this issue to help new code contributors. \r\n$ cd src\/datasets && ack assert -lc \r\n- [x] commands\/convert.py:1\r\n- [x] arrow_reader.py:3\r\n- [x] load.py:7\r\n- [x] utils\/py_utils.py:2\r\n- [x] features\/features.py:9\r\n- [x] arrow_writer.py:7\r\n- [x] search.py:6\r\n- [x] table.py:1\r\n- [x] metric.py:3\r\n- [x] tasks\/image_classification.py:1\r\n- [x] arrow_dataset.py:17\r\n- [x] fingerprint.py:6\r\n- [x] io\/json.py:1\r\n- [x] io\/csv.py:1","Hi all,\r\nI am interested in taking up `fingerprint.py`, `search.py`, `arrow_writer.py` and `metric.py`. Will raise a PR soon!","Let me look into `arrow_dataset.py`, `table.py`, `data_files.py` & `features.py` ","All the tasks are completed for this issue. This can be closed. 
"],"created_at":1635359212000,"updated_at":1640277637000,"closed_at":1640277637000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Motivated by https:\/\/github.com\/huggingface\/transformers\/issues\/12789 in Transformers, one welcoming change would be replacing assertions with proper exceptions. The only type of assertions we should keep are those used as sanity checks.\r\n\r\nCurrently, there is a total of 87 files with the `assert` statements (located under `datasets` and `src\/datasets`), so when working on this, to manage the PR size, only modify 4-5 files at most before submitting a PR.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3171\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3171\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3170","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3170\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3170\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3170\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3170","id":1037601926,"node_id":"PR_kwDODunzps4twDUo","number":3170,"title":"Preserve ordering in `zip_dict`","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635350850000,"updated_at":1635512977000,"closed_at":1635512977000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Replace `set` with the `unique_values` generator in `zip_dict`.\r\n\r\nThis PR fixes the problem with the different ordering of the example keys across different Python sessions caused by the `zip_dict` call in `Features.decode_example`. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3170\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3170\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3170","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3170","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3170.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3170.patch","merged_at":1635512977000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3169","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3169\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3169\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3169\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3169","id":1036773357,"node_id":"PR_kwDODunzps4ttYmZ","number":3169,"title":"Configurable max filename length in file locks","user":{"login":"lmmx","id":2979452,"node_id":"MDQ6VXNlcjI5Nzk0NTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2979452?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lmmx","html_url":"https:\/\/github.com\/lmmx","followers_url":"https:\/\/api.github.com\/users\/lmmx\/followers","following_url":"https:\/\/api.github.com\/users\/lmmx\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lmmx\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lmmx\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lmmx\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lmmx\/orgs","repos_url":"https:\/\/api.github.com\/users\/lmmx\/repos","events_url":"https:\/\/api.github.com\/users\/lmmx\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lmmx\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["I've also added environment variable configuration so that this can be configured once per machine (e.g. in a `.bashrc` file), as is already done for a few other config variables here.","Cancelling PR in favour of @mariosasko's in #3173"],"created_at":1635285175000,"updated_at":1635437654000,"closed_at":1635437653000,"author_association":"NONE","active_lock_reason":null,"body":"Resolve #2924 (https:\/\/github.com\/huggingface\/datasets\/issues\/2924#issuecomment-952330956) wherein the assumption of file lock maximum filename length to be 255 raises an OSError on encrypted drives (ecryptFS on Linux uses part of the lower filename, reducing the maximum filename size to 143). Allowing this limit to be set in the config module allows this to be modified by users. 
Will not affect Windows users, as their class passes 255 on init explicitly.\r\n\r\nReproduced with the following example ([the first few lines of a script from Lightning Flash](https:\/\/lightning-flash.readthedocs.io\/en\/latest\/reference\/speech_recognition.html), fine-tuning a HF model):\r\n\r\n```py\r\nimport torch\r\n\r\nimport flash\r\nfrom flash.audio import SpeechRecognition, SpeechRecognitionData\r\nfrom flash.core.data.utils import download_data\r\n\r\n# 1. Create the DataModule\r\ndownload_data(\"https:\/\/pl-flash-data.s3.amazonaws.com\/timit_data.zip\", \".\/data\")\r\n\r\ndatamodule = SpeechRecognitionData.from_json(\r\n input_fields=\"file\",\r\n target_fields=\"text\",\r\n train_file=\"data\/timit\/train.json\",\r\n test_file=\"data\/timit\/test.json\",\r\n)\r\n```\r\n\r\nWhich gave this traceback:\r\n\r\n```py\r\nTraceback (most recent call last):\r\n File \"lf_ft.py\", line 10, in \r\n datamodule = SpeechRecognitionData.from_json(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_module.py\", line 1005, in from_json\r\n return cls.from_data_source(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_module.py\", line 571, in from_data_source\r\n train_dataset, val_dataset, test_dataset, predict_dataset = data_source.to_datasets(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_source.py\", line 307, in to_datasets\r\n train_dataset = self.generate_dataset(train_data, RunningStage.TRAINING)\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_source.py\", line 344, in generate_dataset\r\n data = load_data(data, mock_dataset)\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/audio\/speech_recognition\/data.py\", line 103, in load_data\r\n dataset_dict = load_dataset(self.filetype, data_files={stage: str(file)})\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1599, in load_dataset\r\n builder_instance = load_dataset_builder(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1457, in load_dataset_builder\r\n builder_instance: DatasetBuilder = builder_cls(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 285, in __init__\r\n with FileLock(lock_path):\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/datasets\/utils\/filelock.py\", line 323, in __enter__\r\n self.acquire()\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/datasets\/utils\/filelock.py\", line 272, in acquire\r\n self._acquire()\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/datasets\/utils\/filelock.py\", line 403, in _acquire\r\n fd = os.open(self._lock_file, open_mode)\r\nOSError: [Errno 36] File name too long: '\/home\/louis\/.cache\/huggingface\/datasets\/_home_louis_.cache_huggingface_datasets_json_default-98e6813a547f72fa_0.0.0_c2d554c3377ea79c7664b93dc65d0803b45e3279000f993c7bfd18937fd7f426.lock'\r\n```\r\n\r\nNote the filename is 145 chars long:\r\n\r\n```\r\n>>> len(\"_home_louis_.cache_huggingface_datasets_json_default-98e6813a547f72fa_0.0.0_c2d554c3377ea79c7664b93dc65d0803b45e3279000f993c7bfd18937fd7f426.lock\")\r\n145\r\n```\r\n\r\nAfter installing datasets as an 
editable local package and modifying the script I was running to first include:\r\n\r\n```py\r\nimport datasets\r\ndatasets.config.MAX_DATASET_CONFIG_ID_READABLE_LENGTH = 143\r\n```\r\n\r\nThe error goes away.\r\n\r\nIf I instead deliberately set the value incorrectly as 144, the OSError returns:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"lf_ft.py\", line 14, in \r\n datamodule = SpeechRecognitionData.from_json(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_module.py\", line 1005, in from_json\r\n return cls.from_data_source(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_module.py\", line 571, in from_data_source\r\n train_dataset, val_dataset, test_dataset, predict_dataset = data_source.to_datasets(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_source.py\", line 307, in to_datasets\r\n train_dataset = self.generate_dataset(train_data, RunningStage.TRAINING)\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_source.py\", line 344, in generate_dataset\r\n data = load_data(data, mock_dataset)\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/audio\/speech_recognition\/data.py\", line 103, in load_data\r\n dataset_dict = load_dataset(self.filetype, data_files={stage: str(file)})\r\n File \"\/home\/louis\/dev\/hf_datasets\/src\/datasets\/load.py\", line 1605, in load_dataset\r\n builder_instance = load_dataset_builder(\r\n File \"\/home\/louis\/dev\/hf_datasets\/src\/datasets\/load.py\", line 1463, in load_dataset_builder\r\n builder_instance: DatasetBuilder = builder_cls(\r\n File \"\/home\/louis\/dev\/hf_datasets\/src\/datasets\/builder.py\", line 285, in __init__\r\n with FileLock(lock_path):\r\n File \"\/home\/louis\/dev\/hf_datasets\/src\/datasets\/utils\/filelock.py\", line 326, in __enter__\r\n self.acquire()\r\n File \"\/home\/louis\/dev\/hf_datasets\/src\/datasets\/utils\/filelock.py\", line 275, in acquire\r\n self._acquire()\r\n File \"\/home\/louis\/dev\/hf_datasets\/src\/datasets\/utils\/filelock.py\", line 406, in _acquire\r\n fd = os.open(self._lock_file, open_mode)\r\nOSError: [Errno 36] File name too long: '\/home\/louis\/.cache\/huggingface\/datasets\/_home_louis_.cache_huggingface_datasets_json_default-32c812b5c1272d64_0.0.0_c2d554c3377ea79c7664b93dc65d0803b45e3279...-5794079643713042223.lock'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3169\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3169\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3169","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3169","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3169.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3169.patch","merged_at":null},"is_pull_request":true}
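A quick POSIX-only diagnostic for the failure mode above, using the lock name from the traceback: the lock filename exceeds eCryptfs's 143-character limit.

```python
import os

cache_dir = os.path.expanduser("~/.cache/huggingface/datasets")
name_max = os.statvfs(cache_dir).f_namemax  # 143 on eCryptfs, 255 on ext4
lock_name = (
    "_home_louis_.cache_huggingface_datasets_json_default-98e6813a547f72fa"
    "_0.0.0_c2d554c3377ea79c7664b93dc65d0803b45e3279000f993c7bfd18937fd7f426.lock"
)
print(len(lock_name), name_max)  # 145 vs 143 -> OSError: [Errno 36] on eCryptfs
```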
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3168","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3168\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3168\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3168\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3168","id":1036673263,"node_id":"I_kwDODunzps49ymDv","number":3168,"title":"OpenSLR\/83 is empty","user":{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false},"assignees":[{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi @tyrius02, thanks for reporting. I see you self-assigned this issue: are you working on this?","@albertvillanova Yes. Figured I introduced the broken config, I should fix it too.\r\n\r\nI've got it working, but I'm struggling with one of the tests. I've started a PR so I\/we can work through it.","Looks like the tests all passed on the PR."],"created_at":1635277341000,"updated_at":1635501849000,"closed_at":1635501849000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\nAs the summary says, openslr \/ SLR83 \/ train is empty.\r\n\r\nThe dataset returned after loading indicates there are **zero** rows. 
The correct number should be **17877**.\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\n\r\n\r\ndatasets.load_dataset('openslr', 'SLR83')\r\n```\r\n\r\n## Expected results\r\n```\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['path', 'audio', 'sentence'],\r\n num_rows: 17877\r\n })\r\n})\r\n```\r\n## Actual results\r\n```\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['path', 'audio', 'sentence'],\r\n num_rows: 0\r\n })\r\n})\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.14.1.dev0 (master HEAD)\r\n- Platform: Ubuntu 20.04\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3168\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3168\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
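Once the fix from #3176 lands, the reproduction above doubles as a regression check against the row count reported here:

```python
import datasets

ds = datasets.load_dataset("openslr", "SLR83", split="train")
# Sanity check against the expected count from this issue:
assert ds.num_rows == 17877, f"unexpected row count: {ds.num_rows}"
```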
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3167","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3167\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3167\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3167\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3167","id":1036488992,"node_id":"I_kwDODunzps49x5Eg","number":3167,"title":"bookcorpusopen no longer works","user":{"login":"lucadiliello","id":23355969,"node_id":"MDQ6VXNlcjIzMzU1OTY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23355969?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lucadiliello","html_url":"https:\/\/github.com\/lucadiliello","followers_url":"https:\/\/api.github.com\/users\/lucadiliello\/followers","following_url":"https:\/\/api.github.com\/users\/lucadiliello\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lucadiliello\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lucadiliello\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lucadiliello\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lucadiliello\/orgs","repos_url":"https:\/\/api.github.com\/users\/lucadiliello\/repos","events_url":"https:\/\/api.github.com\/users\/lucadiliello\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lucadiliello\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! Thanks for reporting :) I think #3280 should fix this","I tried with the latest changes from #3280 on google colab and it worked fine :)\r\nWe'll do a new release soon, in the meantime you can use the updated version with:\r\n```python\r\nload_dataset(\"bookcorpusopen\", revision=\"master\")\r\n```","Fixed by #3280."],"created_at":1635264375000,"updated_at":1637164426000,"closed_at":1637164426000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\n\r\nWhen using the latest version of datasets (1.14.0), I cannot use the `bookcorpusopen` dataset. The process blocks always around `9924 examples [00:06, 1439.61 examples\/s]` when preparing the dataset. 
I also noticed that after half an hour the process is automatically killed because of the RAM usage (the machine has 1TB of RAM...).\r\n\r\nThis did not happen with 1.4.1.\r\nI also tried `rm -rf ~\/.cache\/huggingface` but it did not help.\r\nChanging the Python version between 3.7, 3.8 and 3.9 did not help either.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\nd = datasets.load_dataset('bookcorpusopen')\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.14.0\r\n- Platform: Linux-5.4.0-1054-aws-x86_64-with-glibc2.27\r\n- Python version: 3.9.7\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3167\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3167\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3166","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3166\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3166\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3166\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3166","id":1036450283,"node_id":"PR_kwDODunzps4tsVQJ","number":3166,"title":"Deprecate prepare_module","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Sounds good, thanks !"],"created_at":1635262104000,"updated_at":1636104457000,"closed_at":1636104456000,"author_association":"MEMBER","active_lock_reason":null,"body":"In version 1.13, `prepare_module` was deprecated.\r\n\r\nThis PR adds a deprecation warning and removes it from all the library, using `dataset_module_factory` or `metric_module_factory` instead.\r\n\r\nFix 
#3165.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3166\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3166\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3166","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3166","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3166.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3166.patch","merged_at":1636104456000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3165","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3165\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3165\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3165\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3165","id":1036448998,"node_id":"I_kwDODunzps49xvTm","number":3165,"title":"Deprecate prepare_module","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/org
s","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635262035000,"updated_at":1636104456000,"closed_at":1636104456000,"author_association":"MEMBER","active_lock_reason":null,"body":"In version 1.13, `prepare_module` was deprecated.\r\n\r\nAdd deprecation warning and remove its usage from all the library.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3165\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3165\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3164","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3164\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3164\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3164\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3164","id":1035662830,"node_id":"I_kwDODunzps49uvXu","number":3164,"title":"Add raw data files to the Hub with GitHub LFS for canonical dataset","user":{"login":"zlucia","id":40370937,"node_id":"MDQ6VXNlcjQwMzcwOTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/40370937?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zlucia","html_url":"https:\/\/github.com\/zlucia","followers_url":"https:\/\/api.github.com\/users\/zlucia\/followers","following_url":"https:\/\/api.github.com\/users\/zlucia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zlucia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zlucia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zlucia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zlucia\/orgs","repos_url":"https:\/\/api.github.com\/users\/zlucia\/repos","events_url":"https:\/\/api.github.com\/users\/zlucia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zlucia\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanov
a\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi @zlucia, I would actually suggest hosting the dataset as a huggingface.co-hosted dataset.\r\n\r\nThe only difference with a \"canonical\"\/legacy dataset is that it's nested under an organization (here `stanford` or `stanfordnlp` for instance \u2013 completely up to you) but then you can upload your data using git-lfs (unlike \"canonical\" datasets where we don't host the data)\r\n\r\nLet me know if this fits your use case!\r\n\r\ncc'ing @osanseviero @lhoestq and rest of the team \ud83e\udd17","Hi @zlucia,\r\n\r\nAs @julien-c pointed out, the way to store\/host raw data files in our Hub is by using what we call \"community\" datasets:\r\n- either at your personal namespace: `load_dataset(\"zlucia\/casehold\")`\r\n- or at an organization namespace: for example, if you create the organization `reglab`, then `load_dataset(\"reglab\/casehold\")`\r\n\r\nPlease note that \"canonical\" datasets do not normally store\/host their raw data at our Hub, but in a third-party server. For \"canonical\" datasets, we just host the \"loading script\", that is, a Python script that downloads the raw data from a third-party server, creates the HuggingFace dataset from it and caches it locally.\r\n\r\nIn order to create an organization namespace in our Hub, please follow this link: https:\/\/huggingface.co\/organizations\/new\r\n\r\nThere are already many organizations at our Hub (complete list here: https:\/\/huggingface.co\/organizations), such as:\r\n- Stanford CRFM: https:\/\/huggingface.co\/stanford-crfm\r\n- Stanford NLP: https:\/\/huggingface.co\/stanfordnlp\r\n- Stanford CS329S: Machine Learning Systems Design: https:\/\/huggingface.co\/stanford-cs329s\r\n\r\nAlso note that you in your organization namespace:\r\n- you can add any number of members\r\n- you can store both raw datasets and models, and those can be immediately accessed using `datasets` and `transformers`\r\n\r\nOnce you have created an organization, these are the steps to upload\/host a raw dataset: \r\n- The no-code procedure: https:\/\/huggingface.co\/docs\/datasets\/upload_dataset.html\r\n- Using the command line (terminal): https:\/\/huggingface.co\/docs\/datasets\/share.html#add-a-community-dataset\r\n\r\nPlease, feel free to ping me if you have any further questions or need help.\r\n","Ah I see, I think I was unclear whether there were benefits to uploading a canonical dataset vs. a community provided dataset. Thanks for clarifying. 
I'll see if we want to create an organization namespace and otherwise, will upload the dataset under my personal namespace."],"created_at":1635204501000,"updated_at":1635623691000,"closed_at":1635623691000,"author_association":"NONE","active_lock_reason":null,"body":"I'm interested in sharing the CaseHOLD dataset (https:\/\/arxiv.org\/abs\/2104.08671) as a canonical dataset on the HuggingFace Hub and would like to add the raw data files to the Hub with GitHub LFS, since it seems like a more sustainable long term storage solution, compared to other storage solutions available to my team. From what I can tell, this option is not immediately supported if one follows the sharing steps detailed here: [https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html#sharing-a-canonical-dataset](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html#sharing-a-canonical-dataset), since GitHub LFS is not supported for public forks. Is there a way to request this? Thanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3164\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3164\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3163","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3163\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3163\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3163\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3163","id":1035475061,"node_id":"PR_kwDODunzps4tpI44","number":3163,"title":"Add Image feature","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Awesome, looking forward to using it :)","Few additional comments:\r\n* the current API doesn't meet the requirements mentioned in #3145 (e.g. image mime-type). However, this will be doable soon as we also plan to store image bytes alongside paths in arrow files (see https:\/\/github.com\/huggingface\/datasets\/pull\/3129#discussion_r738426187). Then, PIL can return the correct mime-type: \r\n ```python\r\n from PIL import Image\r\n import io\r\n\r\n mimetype = Image.open(io.BytesIO(image_bytes)).get_format_mimetype()\r\n ``` \r\n I plan to add this change in a separate PR.\r\n* currently, I'm returning an `np.ndarray` object after decoding for consistency with the Audio feature. 
However, the vision models from Transformers prefer an `Image` object to avoid the `Image.fromarray` call in the corresponding feature extractors (see [this warning](https:\/\/huggingface.co\/transformers\/master\/model_doc\/vit.html#transformers.ViTFeatureExtractor.__call__) in the Transformers docs) cc @NielsRogge \r\n\r\nSo I'm not entirely sure whether to return only a NumPy array, only a PIL Image, or both when decoding. The last point worries me because we shouldn't provide an API that leads to a warning in Transformers (in the docs, not in code :)). At the same time, it makes sense to preserve consistency with the Audio feature and return a NumPy array. \r\n\r\nThat's why I would appreciate your opinions on this.","That is a good question. Also pinging @nateraw .\r\n\r\nCurrently we only support returning numpy arrays because of numpy\/tf\/torch\/jax formatting features that we have, and to keep things simple. See the [set_format docs](https:\/\/huggingface.co\/docs\/datasets\/package_reference\/main_classes.html#datasets.Dataset.set_format) for more info","I don't think centering the discussion on what ViT expects is good, as the vision Transformers model are still in an experimental stage and we can adapt those depending on what you do here :-).\r\n\r\nIMO, the discussion should revolve on what a user will want to do with a vision dataset, and they will want to:\r\n- lazily decode their images\r\n- maybe apply data augmentation (for the training set)\r\n- resize to a fixed shape for batching\r\n\r\nThe libraries that provide step 2 and 3 either use PIL (thinking torchvision) or cv2 (thinking albumentations). NumPy does not have any function to resize an image or do basic data augmentation (like a rotate) so I think it shouldn't be the default format for an image dataset, PIL or cv2 (in an ideal world with the ability to switch between the two depending on what the users prefer) would be better.\r\n\r\nSide note: I will work on the vision integration in Transformers with Niels next month so please keep me in the loop for those awesome new vision features!","@sgugger I completely agree with you, especially after trying to convert the `run_image_classification` script from Transformers to use this feature. The current API doesn't seem intuitive there due to the torchvision transforms, which, as you say, prefer PIL over NumPy arrays. \r\n\r\nSo the default format would return `Image` (PIL) \/ `np.ndarray` (cv2) and `set_format(numpy\/tf\/pt)` would return image tensors if I understand you correctly. IMO this makes a lot more sense (and flexibility) than the current API.","Also, one additional library worth mentioning here is AugLy which supports image file paths and `PIL.Image.Image` objects.","That's so nice !\r\n\r\nAlso I couldn't help myself so I've played with it already ^^\r\nI was agreeably surprised that with minor additions I managed to even allow this, which I find very satisfactory:\r\n```python\r\nimport PIL.Image\r\nfrom datasets import Dataset\r\n\r\npath = \"docs\/source\/imgs\/datasets_logo_name.jpg\"\r\n\r\ndataset = Dataset.from_dict({\"img\": [PIL.Image.open(path)]})\r\nprint(dataset.features)\r\n# {'img': Image(id=None)}\r\nprint(dataset[0][\"img\"])\r\n# \r\n```\r\n\r\nLet me know if that's a behavior you'd also like to see \r\n\r\nEDIT: just pushed my changes on a branch, you can see the diff [here](https:\/\/github.com\/mariosasko\/datasets-1\/compare\/add-image-feature...huggingface:image-type-inference) if you want","Thanks, @lhoestq! I like your change. 
Very elegant indeed.\r\n\r\nP.S. I have to write a big comment that explains all the changes\/things left to consider. Will do that in the next few days!","I'm marking this PR as ready for review.\r\n\r\nThanks to @sgugger's comment, the API is much more flexible now as it decodes images (lazily) as `PIL.Image.Image` objects and supports transforms directly on them.\r\n\r\nAlso, we no longer return paths explicitly (previously, we would return `{\"path\": image_path, \"image\": pil_image}`) for the following reasons:\r\n* what to return when reading an image from a URL or a NumPy array. We could set `path` to `None` in these situations, but IMO we should avoid redundant information.\r\n* returning a dict doesn't match nicely with the requirement of supporting image modifications - what to do if the user modifies both the image path and the image\r\n\r\n(Btw, for the images stored locally, you can access their paths with `dset[idx][\"image\"].filename`, or by avoiding decoding with `paths = [ex[\"path\"] for ex in dset]`. @lhoestq @albertvillanova WDYT about having an option to skip decoding for complex features, e.g. `Audio(decode=False)`? This way, the user can easily access the underlying data.)\r\n\r\nExamples of what you can do:\r\n```python\r\n# load local images\r\ndset = Dataset.from_dict({\"image\": [local_image_path]}, features=Features({\"image\": Image()}))\r\n# load remote images (we got this for free by adding support for streaming)\r\ndset = Dataset.from_dict({\"image\": [image_url]}, features=Features({\"image\": Image()}))\r\n# from np.ndarray\r\ndset = Dataset.from_dict({\"image\": [np.array(...)]}, features=Features({\"image\": Image()}))\r\n# cast column (cast_column returns a new dataset)\r\ndset = Dataset.from_dict({\"image\": [local_image_path]})\r\ndset = dset.cast_column(\"image\", Image())\r\n\r\n# automatic type inference\r\ndset = Dataset.from_dict({\"image\": [PIL.Image.open(local_image_path)]})\r\n\r\n# transforms\r\ndef img_transform(example):\r\n ...\r\n example[\"image\"] = transformed_pil_image_or_np_ndarray\r\n return example\r\ndset = dset.map(img_transform)\r\n\r\n# transform that adds a new column with images (automatic inference of the feature type)\r\ndset = dset.map(lambda ex: {\"image_resized\": ex[\"image\"].resize((100, 100))})\r\nprint(dset.features[\"image_resized\"]) # will print Image()\r\n```\r\n\r\nSome more cool features:\r\n* We store the image filename (`pil_image.filename`) whenever possible to avoid costly conversion to bytes\r\n* if possible, we use native compression when encoding images. Otherwise, we fall back to the lossless PNG format (e.g. after image ops or when storing NumPy arrays)\r\n\r\nHints to make reviewing easier:\r\n* feel free to ignore the extension type part because it's related to PyArrow internals.\r\n* also, let me know if we are too strict\/too flexible in terms of types the Image feature can encode\/decode. Hints:\r\n * `encode_example` handles encoding during dataset generation (you can think of it as `yield key, features.encode_example(example)`)\r\n * `objects_to_list_of_image_dicts` handles encoding of returned examples in `map`\r\n\r\nP.S. I'll fork the PR branch and start adding the Image feature to the existing image datasets (will also update the `ImageClassification` template while doing that).","> WDYT about having an option to skip decoding for complex features, e.g. Audio(decode=False)?\r\n\r\nYes definitely, also I think it could be useful for the dataset viewer to not decode the data but instead return either the bytes or the (possibly chained) URL.
cc @severo ","We want to merge this today\/tomorrow, so I'd really appreciate your reviews @sgugger @nateraw.\r\n\r\nAlso, you can test this feature on the existing image datasets (MNIST, beans, food101, ...) by installing `datasets` from the PR branch:\r\n```\r\npip install git+https:\/\/github.com\/huggingface\/datasets.git@adapt-image-datasets\r\n```\r\n","Thanks for the review @nateraw!\r\n\r\n1. This is a copy of your notebook with the fixed map call: https:\/\/colab.research.google.com\/gist\/mariosasko\/e351a717682a9392ca03908e65a2600e\/image-feature-demo.ipynb\r\n (Sorry for misleading you with the map call in my un-updated notebook)\r\n Also, we can avoid this cast by trying to infer the type of the column (`\"pixel_values\"`) returned by the image feature extractor (we are already doing something similar for the columns with names: `\"attention_mask\"`, `\"input_ids\"`, ...). I plan to add this QOL improvement soon. \r\n2. It should work OK even without updating Pillow and PyArrow (these two libraries are pre-installed in Colab, so updating them requires a restart of the runtime). \r\n > I noticed an error that I'm guessing you ran into when I tried using the older version\r\n\r\n Do you recall which type of error it was because everything works fine on my side if I run the notebooks with the lowest supported version of Pillow (`6.2.1`)?","Thanks for playing with it @nateraw and for sharing your notebook, this is useful :)\r\n\r\nI think this is ready now, congrats @mariosasko !","Love this feature and hope to release soon!"],"created_at":1635188868000,"updated_at":1640846241000,"closed_at":1638812942000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Adds the Image feature. This feature is heavily inspired by the recently added Audio feature (#2324). Currently, this PR is pretty simple.\r\n\r\nSome considerations that need further discussion:\r\n* I've decided to use `Pillow`\/`PIL` as the image decoding library. Another candidate I considered is `torchvision`, mostly because of its `accimage` backend, which should be faster for loading `jpeg` images than `Pillow`. However, `torchvision`'s io module only supports png and jpeg images, has `torch` as a hard dependency, and requires magic to work with image bytes ( `torch.ByteTensor(torch.ByteStorage.from_buffer(image_bytes)))`).\r\n* Currently, I'm converting `PIL`'s `Image` type to `np.ndarray`. The vision models in Transformers such as ViT prefer the raw `Image` type and not the decoded tensors, so there is a small overhead due to [this conversion](https:\/\/github.com\/huggingface\/transformers\/blob\/3e8761ab8077e3bb243fe2f78b2a682bd2257cf1\/src\/transformers\/image_utils.py#L62-L73). IMO this is justified to keep this part aligned with the Audio feature, which also returns `np.ndarray`. What do you think?\r\n* Still have to work on the channel decoding logic:\r\n * PyTorch prefers the channel-first ordering (C, H, W); TF and Flax the channel-last ordering (H, W, C). One cool feature would be adjusting the channel order based on the selected formatter (`torch`, `tf`, `jax`). \r\n * By default, `Image.open` returns images of shape (H, W, C). However, ViT's feature extractor expects the format (C, H, W) if the image is passed as an array (explained [here](https:\/\/huggingface.co\/transformers\/model_doc\/vit.html#transformers.ViTFeatureExtractor.__call__)), so I'm more inclined to the format (C, H, W). Which one do you prefer, (C, H, W) or (H, W, C)?\r\n* Are there any options you'd like to see? 
(the user could change those via `cast_column`, such as `sampling_rate` in the Audio feature)\r\n\r\n\r\nTODOs:\r\n* [x] tests\r\n* in subsequent PRs:\r\n * docs - a section in the docs, which gives some additional info on the Image and Audio feature and compares them to \r\n `ArrayND` \r\n * streaming (waiting for #3129 and #3133 to get merged first)\r\n * update the image tasks and the datasets to use the new feature\r\n * Image\/Audio formatting\r\n\r\n[Colab Notebook](https:\/\/colab.research.google.com\/drive\/1mIrTnqTVkWLJWoBzT1ABSe-LFelIep1c?usp=sharing) where you can play with this feature.\r\n\r\nI'm also adding a link to the [Image](https:\/\/github.com\/tensorflow\/datasets\/blob\/7ac7d506488d46038a5854961d068926b3f93c7f\/tensorflow_datasets\/core\/features\/image_feature.py#L155) feature in TFDS because one of our goals is to parse TFDS scripts eventually, so our Image feature has to (at least) support all the formats theirs does.\r\nFeel free to cc anyone who might be interested.\r\n\r\nP.S. Please ignore the changes in the `datasets\/**\/*.py` files \ud83d\ude04.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3163\/reactions","total_count":8,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":7,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3163\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3163","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3163","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3163.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3163.patch","merged_at":1638812942000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3162","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3162\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3162\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3162\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3162","id":1035462136,"node_id":"I_kwDODunzps49t-X4","number":3162,"title":"`datasets-cli test` should work with datasets without scripts","user":{"login":"sashavor","id":14205986,"node_id":"MDQ6VXNlcjE0MjA1OTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14205986?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sashavor","html_url":"https:\/\/github.com\/sashavor","followers_url":"https:\/\/api.github.com\/users\/sashavor\/followers","following_url":"https:\/\/api.github.com\/users\/sashavor\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sashavor\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sashavor\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sashavor\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sashavor\/orgs","repos_url":"https:\/\/api.github.com\/users\/sashavor\/repos","events_url":"https:\/\/api.github.com\/users\/sashavor\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sashavor\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["> It would be really useful to be able to run `datasets-cli test`for datasets that don't have scripts attached to them (whether the datasets are private or not).\r\n> \r\n> I wasn't able to run the script for a private test dataset that I had created on the hub (https:\/\/huggingface.co\/datasets\/huggingface\/DataMeasurementsTest\/tree\/main) -- although @lhoestq came to save the day!\r\n\r\nwhy don't you try to share that info with people, so you can also save some days.","Hi ! 
You can run the command if you download the repository\r\n```\r\ngit clone https:\/\/huggingface.co\/datasets\/huggingface\/DataMeasurementsTest\r\n```\r\nand run the command\r\n```\r\ndatasets-cli test DataMeasurementsTest\/DataMeasurementsTest.py\r\n```\r\n\r\n(though on my side it doesn't manage to download the data since the dataset is private ^^)","> Hi ! You can run the command if you download the repository\r\n> \r\n> ```\r\n> git clone https:\/\/huggingface.co\/datasets\/huggingface\/DataMeasurementsTest\r\n> ```\r\n> \r\n> and run the command\r\n> \r\n> ```\r\n> datasets-cli test DataMeasurementsTest\/DataMeasurementsTest.py\r\n> ```\r\n> \r\n> (though on my side it doesn't manage to download the data since the dataset is private ^^)\r\n\r\nHi! Thanks for the info. \r\ngit cannot find the repository. Do you know if they have deprecated these tests and created a new one?","I think it's become private, but feel free to try with any other dataset like `lhoestq\/test` for example at `https:\/\/huggingface.co\/datasets\/lhoestq\/test`","> I think it's become private, but feel free to try with any other dataset like `lhoestq\/test` for example at `https:\/\/huggingface.co\/datasets\/lhoestq\/test`\r\n\r\nyour example repo and this page `https:\/\/huggingface.co\/docs\/datasets\/add_dataset.html` helped me solve it, thanks a lot"],"created_at":1635187950000,"updated_at":1637856269000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"It would be really useful to be able to run `datasets-cli test` for datasets that don't have scripts attached to them (whether the datasets are private or not).\r\n\r\nI wasn't able to run the script for a private test dataset that I had created on the hub (https:\/\/huggingface.co\/datasets\/huggingface\/DataMeasurementsTest\/tree\/main) -- although @lhoestq came to save the day!\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3162\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3162\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3161","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3161\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3161\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3161\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3161","id":1035444292,"node_id":"PR_kwDODunzps4tpCsm","number":3161,"title":"Add riddle_sense dataset","user":{"login":"ziyiwu9494","id":44691149,"node_id":"MDQ6VXNlcjQ0NjkxMTQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44691149?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ziyiwu9494","html_url":"https:\/\/github.com\/ziyiwu9494","followers_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/followers","following_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/orgs","repos_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/repos","events_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["@lhoestq \r\nI address all the comments, I think. Thanks! \r\n","The five test fails are unrelated to this PR and fixed on master so we can ignore them"],"created_at":1635186656000,"updated_at":1636034475000,"closed_at":1636034475000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Adding a new dataset for QA with riddles. 
I'm confused about the tagging process because it looks like the streamlit app loads data from the current repo, so is it something that should be done after merging or off my fork?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3161\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3161\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3161","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3161","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3161.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3161.patch","merged_at":1636034474000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3160","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3160\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3160\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3160\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3160","id":1035274640,"node_id":"PR_kwDODunzps4tofO0","number":3160,"title":"Better error msg if `len(predictions)` doesn't match `len(references)` in metrics","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Can't test this now but it may be a good improvement indeed.","I added a function, but it only works with the `list` type. For arrays\/tensors, we delegate formatting to the frameworks. 
"],"created_at":1635175505000,"updated_at":1636112699000,"closed_at":1636104662000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Improve the error message in `Metric.add_batch` if `len(predictions)` doesn't match `len(references)`.\r\n\r\ncc: @BramVanroy (feel free to test this code on your examples and review this PR)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3160\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3160\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3160","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3160","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3160.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3160.patch","merged_at":1636104662000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3159","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3159\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3159\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3159\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3159","id":1035174560,"node_id":"PR_kwDODunzps4toKD5","number":3159,"title":"Make inspect.get_dataset_config_names always return a non-empty list","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["This PR is already working (although not very beautiful; see below): the idea was to have the `DatasetModule.builder_kwargs` accessible from the `builder_cls`, so that this can generate the default builder config (at the class level, without requiring the builder to be instantiated).\r\n\r\nI have a plan for a follow-up refactoring (same functionality, better implementation, much nicer), but I think we could already merge this, so that @severo can test it in the datasets previewer and report any potential issues.","Yes @lhoestq you are completely right. 
Indeed I was exclusively using `builder_cls.kwargs` to get the community dataset `name` (nothing else): "lhoestq___demo1"\r\n\r\nSee: https:\/\/github.com\/huggingface\/datasets\/pull\/3159\/files#diff-f933ce41f71c6c0d1ce658e27de62cbe0b45d777e9e68056dd012ac3eb9324f7R413-R415\r\n\r\nIn your example, the `name` I was getting from `builder_cls.kwargs` was:\r\n```python\r\n{\"name\": \"lhoestq___demo1\",...}\r\n```\r\n\r\nI'm going to refactor the whole approach... as I only need the name for this specific case ;)","I think this makes more sense now, @lhoestq @severo \ud83d\ude05 ","It works well, thanks!"],"created_at":1635170383000,"updated_at":1635513277000,"closed_at":1635399889000,"author_association":"MEMBER","active_lock_reason":null,"body":"Treat all configs as named configs, so that no special unnamed config case needs to be handled differently.\r\n\r\nFix #3135.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3159\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3159\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3159","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3159","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3159.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3159.patch","merged_at":1635399889000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3158","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3158\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3158\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3158\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3158","id":1035158070,"node_id":"PR_kwDODunzps4toGpe","number":3158,"title":"Fix string encoding for Value type","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["That was fast! \r\n"],"created_at":1635169453000,"updated_at":1635171126000,"closed_at":1635171125000,"author_association":"MEMBER","active_lock_reason":null,"body":"Some metrics have `string` features but currently it fails if users pass integers instead. 
Indeed feature encoding that handles the conversion of the user's objects to the right python type is missing a case for `string`, while it already works as expected for integers, floats and booleans\r\n\r\nHere is an example code that didn't work previously, but that works with this fix:\r\n```python\r\nimport datasets\r\n\r\n# Note that 'id' is an integer while the SQuAD metric uses strings\r\npredictions = [{'prediction_text': '1976', 'id': 5}]\r\nreferences = [{'answers': {'answer_start': [97], 'text': ['1976']}, 'id': 5}] \r\n\r\nsquad_metric = datasets.load_metric(\"squad\") \r\nsquad_metric.add_batch(predictions=predictions, references=references) \r\nresults = squad_metric.compute()\r\n# {'exact_match': 100.0, 'f1': 100.0}\r\n```\r\n\r\ncc @sgugger @philschmid ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3158\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3158\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3158","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3158","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3158.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3158.patch","merged_at":1635171125000},"is_pull_request":true}
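A minimal sketch of the missing conversion case the PR body describes, assuming the stated behavior (this is not the actual `datasets` source): when a feature is declared as `string`, other scalar types get cast to `str` during encoding, mirroring what already happened for integers, floats and booleans.

```python
# Sketch of the added string case in feature encoding (not library code).
def encode_string_value(value):
    # Cast non-string scalars to str so they match a declared string feature.
    return value if isinstance(value, str) else str(value)

assert encode_string_value(5) == "5"    # the SQuAD 'id' case from the example
assert encode_string_value("5") == "5"  # strings pass through unchanged
```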
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3157","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3157\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3157\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3157\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3157","id":1034775165,"node_id":"PR_kwDODunzps4tm3_I","number":3157,"title":"Fixed: duplicate parameter and missing parameter in docstring","user":{"login":"PanQiWei","id":46810637,"node_id":"MDQ6VXNlcjQ2ODEwNjM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46810637?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PanQiWei","html_url":"https:\/\/github.com\/PanQiWei","followers_url":"https:\/\/api.github.com\/users\/PanQiWei\/followers","following_url":"https:\/\/api.github.com\/users\/PanQiWei\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PanQiWei\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PanQiWei\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PanQiWei\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PanQiWei\/orgs","repos_url":"https:\/\/api.github.com\/users\/PanQiWei\/repos","events_url":"https:\/\/api.github.com\/users\/PanQiWei\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PanQiWei\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1635146760000,"updated_at":1635170539000,"closed_at":1635170539000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"changing duplicate parameter `data_files` in `DatasetBuilder.__init__` to the missing parameter `data_dir`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3157\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3157\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3157","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3157","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3157.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3157.patch","merged_at":1635170538000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3156","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3156\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3156\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3156\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3156","id":1034478844,"node_id":"I_kwDODunzps49qOT8","number":3156,"title":"Rouge and Meteor for multiple references","user":{"login":"avinashsai","id":22453634,"node_id":"MDQ6VXNlcjIyNDUzNjM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22453634?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/avinashsai","html_url":"https:\/\/github.com\/avinashsai","followers_url":"https:\/\/api.github.com\/users\/avinashsai\/followers","following_url":"https:\/\/api.github.com\/users\/avinashsai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/avinashsai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/avinashsai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/avinashsai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/avinashsai\/orgs","repos_url":"https:\/\/api.github.com\/users\/avinashsai\/repos","events_url":"https:\/\/api.github.com\/users\/avinashsai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/avinashsai\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi @avinashsai ,\r\n\r\ncurrently, multiple references are not supported. However, we could add a `multiref` config to fix that. When working with multiple references, we can accumulate them by either taking an average or the best score. Would you like to work on that?","@mariosasko I can help with this issue\r\n"],"created_at":1635098931000,"updated_at":1639289786000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Hi,\r\n\r\nCurrently rogue and meteor supports only single references. 
Can we use these metrics to calculate scores for multiple references?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3156\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3156\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
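A hedged sketch of the `multiref` aggregation proposed in the comments above, where `score_fn` stands in for any single-reference metric such as ROUGE or METEOR; the function name and `strategy` parameter are assumptions for illustration.

```python
# Score a prediction against each reference, then aggregate by taking
# either the best score or the average, as suggested in the discussion.
def multiref_score(score_fn, prediction, references, strategy="best"):
    scores = [score_fn(prediction, ref) for ref in references]
    return max(scores) if strategy == "best" else sum(scores) / len(scores)
```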
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3155","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3155\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3155\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3155\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3155","id":1034468757,"node_id":"I_kwDODunzps49qL2V","number":3155,"title":"Illegal instruction (core dumped) at datasets import","user":{"login":"hacobe","id":91226467,"node_id":"MDQ6VXNlcjkxMjI2NDY3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/91226467?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hacobe","html_url":"https:\/\/github.com\/hacobe","followers_url":"https:\/\/api.github.com\/users\/hacobe\/followers","following_url":"https:\/\/api.github.com\/users\/hacobe\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hacobe\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hacobe\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hacobe\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hacobe\/orgs","repos_url":"https:\/\/api.github.com\/users\/hacobe\/repos","events_url":"https:\/\/api.github.com\/users\/hacobe\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hacobe\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["It seems to be an issue with how conda-forge is building the binaries. 
It works on some machines, but not a machine with AMD Opteron 8384 processors."],"created_at":1635096096000,"updated_at":1637262424000,"closed_at":1637262423000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\nI install datasets using conda and when I import datasets I get: \"Illegal instruction (core dumped)\"\r\n\r\n## Steps to reproduce the bug\r\n\r\n```\r\nconda create --prefix path\/to\/env\r\nconda activate path\/to\/env\r\nconda install -c huggingface -c conda-forge datasets\r\n# exits with output \"Illegal instruction (core dumped)\"\r\npython -m datasets\r\n```\r\n\r\n## Environment info\r\n\r\nWhen I run \"datasets-cli env\", I also get \"Illegal instruction (core dumped)\"\r\n\r\nIf I run the following commands:\r\n\r\n```\r\nconda create --prefix path\/to\/another\/new\/env\r\nconda activate path\/to\/another\/new\/env\r\nconda install -c huggingface transformers\r\ntransformers-cli env\r\n```\r\n\r\nThen I get:\r\n\r\n- `transformers` version: 4.11.3\r\n- Platform: Linux-5.4.0-67-generic-x86_64-with-glibc2.17\r\n- Python version: 3.8.12\r\n- PyTorch version (GPU?): not installed (NA)\r\n- Tensorflow version (GPU?): not installed (NA)\r\n- Flax version (CPU?\/GPU?\/TPU?): not installed (NA)\r\n- Jax version: not installed\r\n- JaxLib version: not installed\r\n- Using GPU in script?: No\r\n- Using distributed or parallel set-up in script?: No\r\n\r\nLet me know what additional information you need in order to debug this issue. Thanks in advance!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3155\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3155\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
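"Illegal instruction" typically means the binary uses SIMD instructions the CPU lacks, and the Opteron 8384 predates AVX, which fits the comment above. A Linux-only diagnostic sketch (not part of `datasets`) to check the CPU flags before importing the package:

```python
# Inspect /proc/cpuinfo for the AVX flag that prebuilt binaries often assume.
with open("/proc/cpuinfo") as f:
    flags = f.read()
print("AVX supported:", " avx" in flags)
```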
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3154","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3154\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3154\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3154\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3154","id":1034361806,"node_id":"I_kwDODunzps49pxvO","number":3154,"title":"Sacrebleu unexpected behaviour\/requirement for data format","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi @BramVanroy!\r\n\r\nGood question. This project relies on PyArrow (tables) to store data too big to fit in RAM. 
In the case of metrics, this means that the number of predictions and references has to match to form a table.\r\n\r\nThat's why your example throws an error even though it matches the schema:\r\n```python\r\nrefs = [\r\n ['The dog bit the man.', 'It was not unexpected.', 'The man bit him first.'],\r\n ['The dog had bit the man.', 'No one was surprised.', 'The man had bitten the dog.'],\r\n] # len(refs) = 2\r\n\r\nhyps = ['The dog bit the man.', \"It wasn't surprising.\", 'The man had just bitten him.'] # len(hyps) = 3\r\n```\r\n\r\nInstead, it should be:\r\n```python\r\nrefs = [\r\n ['The dog bit the man.', 'The dog had bit the man.'],\r\n ['It was not unexpected.', 'No one was surprised.'],\r\n ['The man bit him first.', 'The man had bitten the dog.'], \r\n] # len(refs) = 3\r\n\r\nhyps = ['The dog bit the man.', \"It wasn't surprising.\", 'The man had just bitten him.'] # len(hyps) = 3\r\n```\r\n\r\nHowever, `sacreblue` works with the format that's described in your example, hence this part:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/87c71b9c29a40958973004910f97e4892559dfed\/metrics\/sacrebleu\/sacrebleu.py#L94-L99\r\n\r\nHope you get an idea!","Thanks, that makes sense. It is a bit unfortunate because it may be confusing to users since the input format is suddenly different than what they may expect from the underlying library\/metric. But it is understandable due to how `datasets` works!"],"created_at":1635065733000,"updated_at":1635671312000,"closed_at":1635671311000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\nWhen comparing with the original `sacrebleu` implementation, the `datasets` implementation does some strange things that I do not quite understand. This issue was triggered when I was trying to implement TER and found the datasets implementation of BLEU [here](https:\/\/github.com\/huggingface\/datasets\/pull\/3153).\r\n\r\nIn the below snippet, the original sacrebleu snippet works just fine whereas the datasets implementation throws an error.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport sacrebleu\r\nimport datasets\r\n\r\nrefs = [\r\n ['The dog bit the man.', 'It was not unexpected.', 'The man bit him first.'],\r\n ['The dog had bit the man.', 'No one was surprised.', 'The man had bitten the dog.'],\r\n]\r\n\r\nhyps = ['The dog bit the man.', \"It wasn't surprising.\", 'The man had just bitten him.']\r\n\r\nexpected_bleu = 48.530827\r\n\r\nds_bleu = datasets.load_metric(\"sacrebleu\")\r\n\r\nbleu_score_sb = sacrebleu.corpus_bleu(hyps, refs).score\r\nprint(bleu_score_sb, expected_bleu)\r\n# works: 48.5308...\r\nbleu_score_ds = ds_bleu.compute(predictions=hyps, references=refs)[\"score\"]\r\nprint(bleu_score_ds, expected_bleu)\r\n# ValueError: Predictions and\/or references don't match the expected format.\r\n```\r\nThis seems to be related to how datasets forces the features format here:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/87c71b9c29a40958973004910f97e4892559dfed\/metrics\/sacrebleu\/sacrebleu.py#L94-L99\r\n\r\nand then manipulates the references during the compute stage here\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/87c71b9c29a40958973004910f97e4892559dfed\/metrics\/sacrebleu\/sacrebleu.py#L119-L122\r\n\r\nI do not quite understand why that is required since sacrebleu handles argument parsing quite well [by itself](https:\/\/github.com\/mjpost\/sacrebleu\/blob\/2787185dd0f8d224c72ee5a831d163c2ac711a47\/sacrebleu\/metrics\/base.py#L229). 
\r\n## Actual results\r\nTraceback (most recent call last):\r\n File \"C:\\Users\\bramv\\AppData\\Roaming\\JetBrains\\PyCharm2020.3\\scratches\\scratch_23.py\", line 23, in \r\n bleu_score_ds = ds_bleu.compute(predictions=hyps, references=refs)[\"score\"]\r\n File \"C:\\dev\\python\\datasets\\src\\datasets\\metric.py\", line 392, in compute\r\n self.add_batch(predictions=predictions, references=references)\r\n File \"C:\\dev\\python\\datasets\\src\\datasets\\metric.py\", line 439, in add_batch\r\n raise ValueError(\r\nValueError: Predictions and\/or references don't match the expected format.\r\nExpected format: {'predictions': Value(dtype='string', id='sequence'), 'references': Sequence(feature=Value(dtype='string', id='sequence'), length=-1, id='references')},\r\nInput predictions: ['The dog bit the man.', \"It wasn't surprising.\", 'The man had just bitten him.'],\r\nInput references: [['The dog bit the man.', 'It was not unexpected.', 'The man bit him first.'], ['The dog had bit the man.', 'No one was surprised.', 'The man had bitten the dog.']]\r\n\r\n## Environment info\r\n- `datasets` version: 1.14.1.dev0\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: 3.9.2\r\n- PyArrow version: 4.0.1\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3154\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3154\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
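The two layouts described in the comment above differ by a transpose, which can be expressed directly with the sentences reused from the example:

```python
# sacrebleu groups references into N parallel reference "streams";
# the datasets metric expects one list of references per prediction.
sacrebleu_refs = [
    ['The dog bit the man.', 'It was not unexpected.', 'The man bit him first.'],
    ['The dog had bit the man.', 'No one was surprised.', 'The man had bitten the dog.'],
]
per_prediction_refs = [list(group) for group in zip(*sacrebleu_refs)]
assert len(per_prediction_refs) == 3  # now matches len(hyps)
```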
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3153","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3153\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3153\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3153\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3153","id":1034179198,"node_id":"PR_kwDODunzps4tlEVE","number":3153,"title":"Add TER (as implemented in sacrebleu)","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["The problem appears to stem from the omission of the lines that you mentioned. If you add them back and try examples from [this](https:\/\/huggingface.co\/docs\/datasets\/using_metrics.html) tutorial (sacrebleu metric example) the code you implemented works fine.\r\n\r\nI think the purpose of these lines is follows:\r\n\r\n1. Sacrebleu metrics confusingly expect a nested list of strings when you have just one reference for each hypothesis (i.e. `[[\"example1\", \"example2\", \"example3]]`), while for cases with more than one reference a _nested list of lists of strings_ (i.e. `[[\"ref1a\", \"ref1b\"], [\"ref2a\", \"ref2b\"], [\"ref3a\", \"ref3b\"]]`) is expected instead. So `transformed_references` line outputs the required single reference format for sacrebleu's ter implementation which you can't pass directly to `compute`.\r\n2. 
I'm assuming that an additional check is also related to that confusing format with one\/many references, because it's really difficult to tell what exactly you're doing wrong if you're not aware of that issue."],"created_at":1634999205000,"updated_at":1635851051000,"closed_at":1635851051000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Implements TER (Translation Edit Rate) as per its implementation in sacrebleu. Sacrebleu for BLEU scores is already implemented in `datasets` so I thought this would be a nice addition.\r\n\r\nI started from the sacrebleu implementation, as the two metrics have a lot in common.\r\n\r\nVerified with sacrebleu's [testing suite](https:\/\/github.com\/mjpost\/sacrebleu\/blob\/078c440168c6adc89ba75fe6d63f0d922d42bcfe\/test\/test_ter.py) that this indeed works as intended.\r\n\r\n```python\r\nimport datasets\r\n\r\n\r\ntest_cases = [\r\n (['aaaa bbbb cccc dddd'], ['aaaa bbbb cccc dddd'], 0), # perfect match\r\n (['dddd eeee ffff'], ['aaaa bbbb cccc'], 1), # no overlap\r\n ([''], ['a'], 1), # corner case, empty hypothesis\r\n (['d e f g h a b c'], ['a b c d e f g h'], 1 \/ 8), # a single shift fixes MT\r\n (\r\n [\r\n 'w\u00e4hlen Sie \" Bild neu berechnen , \" um beim \u00c4ndern der Bildgr\u00f6\u00dfe Pixel hinzuzuf\u00fcgen oder zu entfernen , damit das Bild ungef\u00e4hr dieselbe Gr\u00f6\u00dfe aufweist wie die andere Gr\u00f6\u00dfe .',\r\n 'wenn Sie alle Aufgaben im aktuellen Dokument aktualisieren m\u00f6chten , w\u00e4hlen Sie im Men\u00fc des Aufgabenbedienfelds die Option \" Alle Aufgaben aktualisieren . \"',\r\n 'klicken Sie auf der Registerkarte \" Optionen \" auf die Schaltfl\u00e4che \" Benutzerdefiniert \" und geben Sie Werte f\u00fcr \" Fehlerkorrektur-Level \" und \" Y \/ X-Verh\u00e4ltnis \" ein .',\r\n 'Sie k\u00f6nnen beispielsweise ein Dokument erstellen , das ein Auto \u00fcber die B\u00fchne enth\u00e4lt .',\r\n 'w\u00e4hlen Sie im Dialogfeld \" Neu aus Vorlage \" eine Vorlage aus und klicken Sie auf \" Neu . \"',\r\n ],\r\n [\r\n 'w\u00e4hlen Sie \" Bild neu berechnen , \" um beim \u00c4ndern der Bildgr\u00f6\u00dfe Pixel hinzuzuf\u00fcgen oder zu entfernen , damit die Darstellung des Bildes in einer anderen Gr\u00f6\u00dfe beibehalten wird .',\r\n 'wenn Sie alle Aufgaben im aktuellen Dokument aktualisieren m\u00f6chten , w\u00e4hlen Sie im Men\u00fc des Aufgabenbedienfelds die Option \" Alle Aufgaben aktualisieren . \"',\r\n 'klicken Sie auf der Registerkarte \" Optionen \" auf die Schaltfl\u00e4che \" Benutzerdefiniert \" und geben Sie f\u00fcr \" Fehlerkorrektur-Level \" und \" Y \/ X-Verh\u00e4ltnis \" niedrigere Werte ein .',\r\n 'Sie k\u00f6nnen beispielsweise ein Dokument erstellen , das ein Auto enthalt , das sich \u00fcber die B\u00fchne bewegt .',\r\n 'w\u00e4hlen Sie im Dialogfeld \" Neu aus Vorlage \" eine Vorlage aus und klicken Sie auf \" Neu . 
\"',\r\n ],\r\n 0.136 # realistic example from WMT dev data (2019)\r\n ),\r\n]\r\n\r\nter = datasets.load_metric(r\"path\\to\\datasets\\metrics\\ter\")\r\n\r\npredictions = [\"hello there general kenobi\", \"foo bar foobar\"]\r\nreferences = [[\"hello there general kenobi\", \"hello there !\"], [\"foo bar foobar\", \"foo bar foobar\"]]\r\nprint(ter.compute(predictions=predictions, references=references))\r\n\r\nfor hyp, ref, score in test_cases:\r\n # Note the reference transformation which is different from scarebleu's input format\r\n results = ter.compute(predictions=hyp, references=[[r] for r in ref])\r\n assert 100*score == results[\"score\"], f\"expected {100*score}, got {results['score']}\"\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3153\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3153\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3153","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3153","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3153.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3153.patch","merged_at":1635851051000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3152","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3152\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3152\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3152\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3152","id":1034039379,"node_id":"PR_kwDODunzps4tkqi-","number":3152,"title":"Fix some typos in the documentation","user":{"login":"h4iku","id":3812788,"node_id":"MDQ6VXNlcjM4MTI3ODg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3812788?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/h4iku","html_url":"https:\/\/github.com\/h4iku","followers_url":"https:\/\/api.github.com\/users\/h4iku\/followers","following_url":"https:\/\/api.github.com\/users\/h4iku\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/h4iku\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/h4iku\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/h4iku\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/h4iku\/orgs","repos_url":"https:\/\/api.github.com\/users\/h4iku\/repos","events_url":"https:\/\/api.github.com\/users\/h4iku\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/h4iku\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634953115000,"updated_at":1635172056000,"closed_at":1635170628000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3152\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3152\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3152","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3152","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3152.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3152.patch","merged_at":1635170628000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3151","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3151\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3151\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3151\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3151","id":1033890501,"node_id":"PR_kwDODunzps4tkL7t","number":3151,"title":"Re-add faiss to windows testing suite","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634931269000,"updated_at":1635850054000,"closed_at":1635847563000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"In recent versions, `faiss-cpu` seems to be available for Windows as well. See the [PyPi page](https:\/\/pypi.org\/project\/faiss-cpu\/#files) to confirm. We can therefore included it for Windows in the setup file.\r\n\r\nAt first tests didn't pass due to problems with permissions as caused by `NamedTemporaryFile` on Windows. This built-in library is notoriously poor in playing nice on Windows. The required change isn't pretty, but it works. First set `delete=False` to not automatically try to delete the file on `exit`. Then, manually delete the file with `unlink`. 
It's weird, I know, but it works.\r\n\r\n```python\r\nwith tempfile.NamedTemporaryFile(delete=False) as tmp_file:\r\n # do stuff\r\nos.unlink(tmp_file.name)\r\n```\r\n\r\ncloses #3150 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3151\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3151\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3151","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3151","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3151.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3151.patch","merged_at":1635847563000},"is_pull_request":true}
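A self-contained version of the quoted pattern, with a placeholder payload standing in for the real test workload: on Windows, the file created by `NamedTemporaryFile` cannot be reopened while the original handle is open, so `delete=False` defers deletion and `os.unlink` cleans up manually afterwards.

```python
import os
import tempfile

# delete=False prevents the automatic delete-on-close that breaks on Windows.
with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
    tmp_file.write(b"payload")  # stand-in for the real test workload
    path = tmp_file.name

# Manual cleanup once the handle is closed and no longer locked.
os.unlink(path)
```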
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3150","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3150\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3150\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3150\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3150","id":1033831530,"node_id":"I_kwDODunzps49nwRq","number":3150,"title":"Faiss _is_ available on Windows","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Sure, feel free to open a PR."],"created_at":1634926036000,"updated_at":1635847563000,"closed_at":1635847563000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"In the setup file, I find the following:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/87c71b9c29a40958973004910f97e4892559dfed\/setup.py#L171\r\n\r\nHowever, FAISS does install perfectly fine on Windows on my system. You can also confirm this on the [PyPi page](https:\/\/pypi.org\/project\/faiss-cpu\/#files), where Windows wheels are available. Maybe this was true for older versions? 
For current versions, this can be removed I think.\r\n\r\n(This isn't really a bug but didn't know how else to tag.)\r\n\r\nIf you agree I can do a quick PR and remove that line.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3150\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3150\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
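A hypothetical before/after illustration of the proposed change; the actual `setup.py` entry may be worded differently, but the idea is to drop a PEP 508 environment marker now that faiss-cpu publishes Windows wheels:

```python
# Hypothetical illustration only; the real setup.py line may differ.
# Before: faiss excluded on Windows via an environment marker.
tests_require_before = ["faiss-cpu; sys_platform != 'win32'"]
# After: Windows wheels exist on PyPI, so no marker is needed.
tests_require_after = ["faiss-cpu"]
```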
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3149","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3149\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3149\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3149\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3149","id":1033747625,"node_id":"PR_kwDODunzps4tjuUt","number":3149,"title":"Add CMU Hinglish DoG Dataset for MT","user":{"login":"Ishan-Kumar2","id":46553104,"node_id":"MDQ6VXNlcjQ2NTUzMTA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46553104?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Ishan-Kumar2","html_url":"https:\/\/github.com\/Ishan-Kumar2","followers_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/followers","following_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/orgs","repos_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/repos","events_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi @lhoestq, thanks a lot for the help. I have moved the part as suggested. 
\r\nAlthough still while running the dummy data script, I face this issue\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\/home\/ishan\/anaconda3\/bin\/datasets-cli\", line 8, in \r\n sys.exit(main())\r\n File \"\/home\/ishan\/anaconda3\/lib\/python3.8\/site-packages\/datasets\/commands\/datasets_cli.py\", line 33, in main\r\n service.run()\r\n File \"\/home\/ishan\/anaconda3\/lib\/python3.8\/site-packages\/datasets\/commands\/dummy_data.py\", line 318, in run\r\n self._autogenerate_dummy_data(\r\n File \"\/home\/ishan\/anaconda3\/lib\/python3.8\/site-packages\/datasets\/commands\/dummy_data.py\", line 363, in _autogenerate_dummy_data\r\n dataset_builder._prepare_split(split_generator)\r\n File \"\/home\/ishan\/anaconda3\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 1103, in _prepare_split\r\n example = self.info.features.encode_example(record)\r\n File \"\/home\/ishan\/anaconda3\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 981, in encode_example\r\n return encode_nested_example(self, example)\r\n File \"\/home\/ishan\/anaconda3\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 775, in encode_nested_example\r\n return {\r\n File \"\/home\/ishan\/anaconda3\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 775, in \r\n return {\r\n File \"\/home\/ishan\/anaconda3\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 99, in zip_dict\r\n yield key, tuple(d[key] for d in dicts)\r\n File \"\/home\/ishan\/anaconda3\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 99, in \r\n yield key, tuple(d[key] for d in dicts)\r\nKeyError: 'status'\r\n```\r\nThis KeyError is at times different from 'status' also.\r\nwhen I run \r\n```\r\ndatasets-cli dummy_data datasets\/cmu_hinglish_dog --auto_generate --json_field='history'\r\n```\r\nI have tried removing unnecessary feature type definition, but that didn't help. Please let me know if I am missing something, thanks!","The CI fail is unrelated to this PR and fixed on master. Merging !"],"created_at":1634919445000,"updated_at":1636976202000,"closed_at":1636972065000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Address part of #2841 \r\n\r\nAdded the CMU Hinglish DoG Dataset as in GLUECoS. Added it as a seperate dataset as unlike other tasks of GLUE CoS this can't be evaluated for a BERT like model. \r\nConsists of parallel dataset between Hinglish (Hindi-English) and English, can be used for Machine Translation between the two. 
\r\n\r\nThe data processing part is inspired from the GLUECoS repo [here](https:\/\/github.com\/microsoft\/GLUECoS\/blob\/7fdc51653e37a32aee17505c47b7d1da364fa77e\/Data\/Preprocess_Scripts\/preprocess_mt_en_hi.py)\r\nThe dummy data part is not working properly, it shows \r\n``` UnboundLocalError: local variable 'generator_splits' referenced before assignment ``` \r\nwhen I run without ``--auto_generate``.\r\n\r\nPlease let me know how I can fix that.\r\nThanks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3149\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3149\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3149","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3149","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3149.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3149.patch","merged_at":1636972065000},"is_pull_request":true}
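A hedged guess at the `KeyError` in the traceback above: `encode_example` zips each record against the declared features, so a record missing a declared key (such as `status`) fails. Normalizing records so every declared key is present is one possible workaround; `normalize_record` and the field names below are illustrative, not part of the dataset script.

```python
# Fill absent declared keys with None so zip_dict finds every key.
def normalize_record(record, feature_keys):
    return {key: record.get(key) for key in feature_keys}

print(normalize_record({"history": []}, ["history", "status"]))
# {'history': [], 'status': None}
```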
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3148","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3148\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3148\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3148\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3148","id":1033685208,"node_id":"I_kwDODunzps49nMjY","number":3148,"title":"Streaming with num_workers != 0","user":{"login":"justheuristic","id":3491902,"node_id":"MDQ6VXNlcjM0OTE5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3491902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/justheuristic","html_url":"https:\/\/github.com\/justheuristic","followers_url":"https:\/\/api.github.com\/users\/justheuristic\/followers","following_url":"https:\/\/api.github.com\/users\/justheuristic\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/justheuristic\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/justheuristic\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/justheuristic\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/justheuristic\/orgs","repos_url":"https:\/\/api.github.com\/users\/justheuristic\/repos","events_url":"https:\/\/api.github.com\/users\/justheuristic\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/justheuristic\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["I can confirm that I was able to reproduce the bug. This seems odd given that #3423 reports duplicate data retrieval when `num_workers` and `streaming` are used together, which is obviously different from what is reported here. ","Any update? A possible solution is to have multiple arrow files as shards, and handle them like what webdatasets does.\r\n\r\n\r\nPytorch's new dataset RFC is supporting sharding now, which may helps avoid duplicate data under streaming mode. (https:\/\/github.com\/pytorch\/pytorch\/blob\/master\/torch\/utils\/data\/datapipes\/iter\/grouping.py#L13)\r\n","Hi ! Thanks for the insights :) Note that in streaming mode there're usually no arrow files. The data are streamed from TAR, ZIP, text, etc. files directly from the web. 
Though for sharded datasets we can definitely adopt a similar strategy !"],"created_at":1634915237000,"updated_at":1641393049000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nWhen using dataset streaming with pytorch DataLoader, the setting num_workers to anything other than 0 causes the code to freeze forever before yielding the first batch.\r\n\r\nThe code owner is likely @lhoestq \r\n\r\n## Steps to reproduce the bug\r\n\r\nFor your convenience, we've prepped a colab notebook that reproduces the bug\r\nhttps:\/\/colab.research.google.com\/drive\/1Mgl0oTZSNIE3UeGl_oX9wPCOIxRg19h1?usp=sharing\r\n```python\r\n!pip install datasets==1.14.0\r\n\r\nshould_freeze_forever = True\r\n# ^-- set this to True in order to freeze forever, set to False in order to work normally\r\n\r\nimport torch\r\nfrom datasets import load_dataset\r\n\r\ndata = load_dataset(\"oscar\", \"unshuffled_deduplicated_bn\", split=\"train\", streaming=True)\r\ndata = data.map(lambda x: {\"text\": x[\"text\"], \"orig\": f\"oscar[{x['id']}]\"}, batched=True)\r\ndata = data.shuffle(100, seed=1337)\r\n\r\ndata = data.with_format(\"torch\")\r\nloader = torch.utils.data.DataLoader(data, batch_size=2, num_workers=2 if should_freeze_forever else 0)\r\n\r\n# v-- the code should freeze forever at this line\r\nfor i, row in enumerate(loader):\r\n print(row)\r\n if i > 10: break\r\nprint(\"DONE!\")\r\n```\r\n\r\n## Expected results\r\nThe code should not freeze forever with num_workers=2\r\n\r\n## Actual results\r\nThe code freezes forever with num_workers=2\r\n\r\n## Environment info\r\n- `datasets` version: 1.14.0 (also found in previous versions)\r\n- Platform: google colab (also locally)\r\n- Python version: 3.7, (also 3.8)\r\n- PyArrow version: 3.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3148\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3148\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
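A hedged sketch of the sharding strategy floated in the comments above, assigning each DataLoader worker a disjoint subset of shards via PyTorch's `torch.utils.data.get_worker_info`; the shard contents are placeholders and this is not how `datasets` streaming currently works.

```python
import torch

def iter_shards(shards):
    # Inside a DataLoader worker, get_worker_info() exposes id/num_workers.
    info = torch.utils.data.get_worker_info()
    if info is None:                # num_workers == 0: one process reads all
        my_shards = shards
    else:                           # round-robin split across workers
        my_shards = shards[info.id :: info.num_workers]
    for shard in my_shards:
        yield from shard            # each shard is assumed to be iterable
```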
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3147","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3147\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3147\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3147\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3147","id":1033607659,"node_id":"PR_kwDODunzps4tjRHG","number":3147,"title":"Fix CLI test to ignore verfications when saving infos","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634910766000,"updated_at":1635321710000,"closed_at":1635321709000,"author_association":"MEMBER","active_lock_reason":null,"body":"Fix #3146.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3147\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3147\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3147","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3147","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3147.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3147.patch","merged_at":1635321709000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3146","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3146\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3146\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3146\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3146","id":1033605947,"node_id":"I_kwDODunzps49m5M7","number":3146,"title":"CLI test command throws NonMatchingSplitsSizesError when saving infos","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634910653000,"updated_at":1635321709000,"closed_at":1635321709000,"author_association":"MEMBER","active_lock_reason":null,"body":"When trying to generate a datset JSON metadata, a `NonMatchingSplitsSizesError` is thrown:\r\n```\r\n$ datasets-cli test datasets\/arabic_billion_words --save_infos --all_configs\r\nTesting builder 'Alittihad' (1\/10)\r\nDownloading and preparing dataset arabic_billion_words\/Alittihad (download: 332.13 MiB, generated: Unknown size, post-processed: Unknown size, total: 332.13 MiB) to .cache\\arabic_billion_words\\Alittihad\\1.1.0\\8175ff1c9714c6d5d15b1141b6042e5edf048276bb81a9c14e35e149a7a62ae4...\r\nTraceback (most recent call last):\r\n File \"path\\huggingface\\datasets\\.venv\\Scripts\\datasets-cli-script.py\", line 33, in \r\n sys.exit(load_entry_point('datasets', 'console_scripts', 
'datasets-cli')())\r\n File \"path\\huggingface\\datasets\\src\\datasets\\commands\\datasets_cli.py\", line 33, in main\r\n service.run()\r\n File \"path\\huggingface\\datasets\\src\\datasets\\commands\\test.py\", line 144, in run\r\n builder.download_and_prepare(\r\n File \"path\\huggingface\\datasets\\src\\datasets\\builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"path\\huggingface\\datasets\\src\\datasets\\builder.py\", line 709, in _download_and_prepare\r\n verify_splits(self.info.splits, split_dict)\r\n File \"path\\huggingface\\datasets\\src\\datasets\\utils\\info_utils.py\", line 74, in verify_splits\r\n raise NonMatchingSplitsSizesError(str(bad_splits))\r\ndatasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=0, num_examples=0, dataset_name='arabic_billion_words'), 'recorded': SplitInfo(name='train', num_bytes=1601790302, num_examples=349342, dataset_name='arabic_billion_words')}]\r\n```\r\n\r\nThis is because a previous run generated a wrong `dataset_info.json`.\r\n\r\nThis error can be avoided by passing `--ignore_verifications`, but I think this should be assumed when passing `--save_infos`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3146\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3146\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
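Until `--save_infos` implies it, the workaround named in the issue body above is to pass the verification-skipping flag explicitly:

```
$ datasets-cli test datasets/arabic_billion_words --save_infos --all_configs --ignore_verifications
```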
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3145","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3145\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3145\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3145\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3145","id":1033580009,"node_id":"I_kwDODunzps49my3p","number":3145,"title":"[when Image type will exist] provide a way to get the data as binary + filename","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["@severo, maybe somehow related to this PR ?\r\n- #3129","@severo I'll keep that in mind.\r\n\r\nYou can track progress on the Image feature in #3163 (still in the early stage). ","Hi ! 
As discussed with @severo offline it looks like the dataset viewer already supports reading PIL images, so maybe the dataset viewer doesn't need to disable decoding after all","Fixed with https:\/\/github.com\/huggingface\/datasets\/pull\/3163"],"created_at":1634909029000,"updated_at":1640171137000,"closed_at":1640171136000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nWhen a dataset cell contains a value of type Image (be it from a remote URL, an Array2D\/3D, or any other way to represent images), I want to be able to write the image to the disk, with the correct filename, and optionally to know its mimetype, in order to serve it on the web.\r\n\r\nNote: this issue would apply exactly the same for the `Audio` type.\r\n\r\n**Describe the solution you'd like**\r\n\r\nIf a \"cell\" has the type `Image`, provide a way to get the binary content of the file, and the filename, eg as:\r\n\r\n```python\r\n filename: str\r\n data: bytes\r\n```\r\n\r\n**Describe alternatives you've considered**\r\n\r\nA way to write the cell to the disk (passing a local directory), and then return the pathname, filename, and mimetype.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3145\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3145\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
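A hypothetical sketch of the accessor requested in issue 3145; the `ImageCell` name and fields are illustrative only, mirroring the `filename`/`data` pair (plus optional mimetype) from the request, and are not an existing `datasets` API:

```python
import os
from dataclasses import dataclass

@dataclass
class ImageCell:
    filename: str    # e.g. "0001.png"
    data: bytes      # raw encoded image content
    mimetype: str    # e.g. "image/png", useful for serving on the web

def write_cell(cell: ImageCell, directory: str) -> str:
    """Write the binary content to disk and return the resulting path."""
    path = os.path.join(directory, cell.filename)
    with open(path, "wb") as f:
        f.write(cell.data)
    return path
```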
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3144","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3144\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3144\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3144\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3144","id":1033573760,"node_id":"I_kwDODunzps49mxWA","number":3144,"title":"Infer the features if missing","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634908653000,"updated_at":1634908653000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\n\r\nSome datasets, in particular community datasets, have no info file, thus no features.\r\n\r\n**Describe the solution you'd like**\r\n\r\nIf a dataset has no features, the first loaded data (5-10 rows) could be used to infer the types.\r\n\r\nRelated: `datasets` would provide a way to load the data, and get the rows AND the features as the result.\r\n\r\n**Describe alternatives you've considered**\r\n\r\nThe HF hub could also provide some UI to help dataset maintainers make the types of their rows explicit, or infer them automatically as an initial proposal.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3144\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3144\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
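A rough sketch of the inference proposed in issue 3144, assuming streaming access and flat scalar columns; this is illustrative, not the library's behavior (a robust version would inspect several rows and handle nested types):

```python
from datasets import load_dataset, Features, Value

def infer_features(path: str, config: str = None) -> Features:
    """Peek at the first streamed row of a dataset and guess flat column types."""
    stream = load_dataset(path, config, split="train", streaming=True)
    first = next(iter(stream))

    def guess(value):
        if isinstance(value, bool):      # check bool before int: bool is a subclass of int
            return Value("bool")
        if isinstance(value, int):
            return Value("int64")
        if isinstance(value, float):
            return Value("float64")
        return Value("string")           # fallback for everything else

    return Features({name: guess(value) for name, value in first.items()})
```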
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3143","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3143\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3143\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3143\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3143","id":1033569655,"node_id":"I_kwDODunzps49mwV3","number":3143,"title":"Provide a way to check if the features (in info) match with the data of a split","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Related: #3144 "],"created_at":1634908416000,"updated_at":1634908676000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\n\r\nI understand that, currently, the loaded data does not always have the type described in the info features\r\n\r\n**Describe the solution you'd like**\r\n\r\nProvide a way to check if the rows have the type described by the info features\r\n\r\n**Describe alternatives you've considered**\r\n\r\nAlways check it, and raise an error when loading the data if its type doesn't match the features.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3143\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3143\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
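One way the check requested in issue 3143 could look, as a sketch built on `Features.encode_example`, which raises when a value does not fit its declared type; the wrapper function is illustrative:

```python
from datasets import load_dataset

def check_features_match(path: str, n_rows: int = 10) -> None:
    """Encode a few rows with the declared features; a type mismatch raises an error."""
    dset = load_dataset(path, split="train")
    for row in dset.select(range(min(n_rows, len(dset)))):
        dset.features.encode_example(row)  # raises if a value doesn't fit its declared type
```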
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3142","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3142\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3142\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3142\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3142","id":1033566034,"node_id":"I_kwDODunzps49mvdS","number":3142,"title":"Provide a way to write a streamed dataset to the disk","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Yes, I agree this feature is much needed. We could do something similar to what TF does (https:\/\/www.tensorflow.org\/api_docs\/python\/tf\/data\/Dataset#cache). \r\n\r\nIdeally, if the entire streamed dataset is consumed\/cached, the generated cache should be reusable for the Arrow dataset."],"created_at":1634908193000,"updated_at":1635506079000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nThe streaming mode allows to get the 100 first rows of a dataset very quickly. 
But it does not cache the answer, so a subsequent call to get the same 100 rows will send a request to the server again and again.\r\n\r\n**Describe the solution you'd like**\r\n\r\nProvide a way to write the streamed rows of a dataset to disk, and to load them from there later.\r\n\r\n**Describe alternatives you've considered**\r\n\r\nProvide a third mode: `lazy`, which would use the local cache for the data that has already been fetched, and use streaming to get the rest of the requested data. \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3142\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3142\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
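A sketch of persisting a streamed slice today, assuming a recent `datasets` version with `Dataset.from_list`; the built-in cache mode requested in issue 3142 would make this manual step unnecessary:

```python
from datasets import load_dataset, Dataset, load_from_disk

stream = load_dataset("oscar", "unshuffled_deduplicated_bn", split="train", streaming=True)
rows = [row for _, row in zip(range(100), stream)]   # materialize the first 100 rows

Dataset.from_list(rows).save_to_disk("oscar_first_100")
cached = load_from_disk("oscar_first_100")           # later calls reuse the local copy
```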
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3141","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3141\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3141\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3141\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3141","id":1033555910,"node_id":"PR_kwDODunzps4tjGYz","number":3141,"title":"Fix caching bugs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634907565000,"updated_at":1634935928000,"closed_at":1634910425000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"This PR fixes some caching bugs (most likely introduced in the latest refactor):\r\n* remove \")\" added by accident in the dataset dir name\r\n* correctly pass the namespace kwargs in `CachedDatasetModuleFactory`\r\n* improve the warning message if `HF_DATASETS_OFFLINE is 
`True`\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3141\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3141\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3141","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3141","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3141.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3141.patch","merged_at":1634910424000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3140","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3140\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3140\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3140\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3140","id":1033524132,"node_id":"I_kwDODunzps49mlOk","number":3140,"title":"Add DER metric","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"assignees":[{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634905331000,"updated_at":1634905348000,"closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"body":"Add DER metric for speaker diarization task.\r\n\r\nThis is used by SUPERB beenchmark, for example.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3140\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3140\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3139","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3139\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3139\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3139\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3139","id":1033524079,"node_id":"I_kwDODunzps49mlNv","number":3139,"title":"Fix file\/directory deletion on Windows ","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634905328000,"updated_at":1634905328000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Currently, on Windows, some attempts to delete a dataset file\/directory will fail with the `PerimissionError`. 
\r\n\r\nExamples:\r\n- download a dataset, then force redownload it in the same session while keeping a reference to the downloaded dataset\r\n```python\r\nfrom datasets import load_dataset\r\ndset = load_dataset(\"sst\", split=\"train\")\r\ndset = load_dataset(\"sst\", split=\"train\", download_mode=\"force_redownload\")\r\n```\r\n- try to clean up the cache files while keeping a reference to those files (via the mapped dataset):\r\n```python\r\nfrom datasets import load_dataset\r\ndset = load_dataset(\"sst\", split=\"train\")\r\ndset_mapped = dset.map(lambda _: {\"dummy_col\": 1})\r\ndset.cleanup_cache_files()\r\n```\r\nWe should fix those.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3139\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3139\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
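A hedged workaround sketch for the second example in issue 3139: drop the reference and force a collection so Windows releases the memory-mapped cache file before deletion is attempted (an assumption about the cause, not the eventual fix):

```python
import gc
from datasets import load_dataset

dset = load_dataset("sst", split="train")
dset_mapped = dset.map(lambda _: {"dummy_col": 1})

del dset_mapped   # drop the reference that keeps the cache file memory-mapped
gc.collect()      # make sure the mapping is actually released before deleting
dset.cleanup_cache_files()
```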
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3138","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3138\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3138\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3138\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3138","id":1033379997,"node_id":"I_kwDODunzps49mCCd","number":3138,"title":"More fine-grained taxonomy of error types","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634895329000,"updated_at":1634895335000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\n\r\nExceptions like `FileNotFoundError` can be raised by different parts of the code, and it's hard to tell which one raised it\r\n\r\n**Describe the solution you'd like**\r\n\r\nGive a specific exception type for every group of similar errors\r\n\r\n**Describe alternatives you've considered**\r\n\r\nRely on the error message, using regex\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3138\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3138\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
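A sketch of what the taxonomy requested in issue 3138 could look like; the class names are hypothetical, not the library's actual exceptions:

```python
class DatasetsError(Exception):
    """Base class for all errors raised by the library."""

class ScriptNotFoundError(DatasetsError, FileNotFoundError):
    """The loading script could not be located (locally or on the Hub)."""

class DataFilesNotFoundError(DatasetsError, FileNotFoundError):
    """The data files referenced by the loading script are missing."""

# Callers can then catch exactly the failure they care about,
# e.g. `except ScriptNotFoundError:`, instead of matching error messages with regex.
```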
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3137","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3137\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3137\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3137\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3137","id":1033363652,"node_id":"PR_kwDODunzps4tievk","number":3137,"title":"Fix numpy deprecation warning for ragged tensors","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["This'll be a really helpful fix, thank you!"],"created_at":1634894266000,"updated_at":1634918655000,"closed_at":1634918654000,"author_association":"MEMBER","active_lock_reason":null,"body":"Numpy shows a deprecation warning when we call `np.array` on a list of ragged tensors without specifying the `dtype`. 
If their shapes match, the tensors can be collated together; otherwise, the resulting array should have `dtype=np.object`.\r\n\r\nFix #3084 \r\ncc @Rocketknight1 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3137\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3137\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3137","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3137","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3137.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3137.patch","merged_at":1634918654000},"is_pull_request":true}
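The fix described in PR 3137's body amounts to something like this simplified sketch (not the PR's actual diff):

```python
import numpy as np

def collate(tensors):
    """Stack tensors when shapes agree; otherwise build an explicit object array."""
    if len({t.shape for t in tensors}) == 1:
        return np.array(tensors)                 # uniform shapes: normal collation
    arr = np.empty(len(tensors), dtype=object)   # ragged: be explicit about the dtype
    arr[:] = tensors                             # avoids the numpy deprecation warning
    return arr
```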
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3136","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3136\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3136\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3136\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3136","id":1033360396,"node_id":"PR_kwDODunzps4tieFi","number":3136,"title":"Fix script of Arabic Billion Words dataset to return all data","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634894064000,"updated_at":1634909321000,"closed_at":1634909320000,"author_association":"MEMBER","active_lock_reason":null,"body":"The script has a bug and only parses and generates a portion of the entire dataset.\r\n\r\nThis PR fixes the loading script so that is properly parses the entire dataset. 
\r\n\r\nCurrent implementation generates the same number of examples as reported in the [original paper](https:\/\/arxiv.org\/abs\/1611.04033) for all configurations except for one:\r\n- For \"Youm7\" we generate more examples (1172136) than the ones reported by the paper (1025027)\r\n\r\n| | Number of examples | Number of examples according to the source |\r\n|:---------------|-------------------:|-----:|\r\n| Alittihad | 349342 |349342 |\r\n| Almasryalyoum | 291723 |291723 |\r\n| Almustaqbal | 446873 |446873 |\r\n| Alqabas | 817274 |817274 |\r\n| Echoroukonline | 139732 |139732 |\r\n| Ryiadh | 858188 | 858188 |\r\n| Sabanews | 92149 |92149 |\r\n| SaudiYoum | 888068 |888068 |\r\n| Techreen | 314597 |314597 |\r\n| Youm7 | 1172136 |1025027 |\r\n\r\nFix #3126.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3136\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3136\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3136","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3136","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3136.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3136.patch","merged_at":1634909319000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3135","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3135\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3135\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3135\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3135","id":1033294299,"node_id":"I_kwDODunzps49ltHb","number":3135,"title":"Make inspect.get_dataset_config_names always return a non-empty list of configs","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on 
huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi @severo, I guess this issue requests not only to be able to access the configuration name (by using `inspect.get_dataset_config_names`), but the configuration itself as well (I mean you use the name to get the configuration afterwards, maybe using `builder_cls.builder_configs`), is this right?","Yes, maybe the issue could be reformulated. 
As a user, I want to avoid having to manage special cases:\r\n- I want to be able to get the names of a dataset's configs, and use them in the rest of the API (get the data, get the split names, etc).\r\n- I don't want to have to manage datasets with named configs (`glue`) differently from datasets without named configs (`acronym_identification`, `Check\/region_1`)"],"created_at":1634889770000,"updated_at":1635399889000,"closed_at":1635399889000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nCurrently, some datasets have a configuration, while others don't. It would be simpler for the user to always have configuration names to refer to\r\n\r\n**Describe the solution you'd like**\r\n\r\nIn that sense, `inspect.get_dataset_config_names` should always return at least one configuration name, be it `default` or `Check___region_1` (for community datasets like `Check\/region_1`).\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/c5747a5e1dde2670b7f2ca6e79e2ffd99dff85af\/src\/datasets\/inspect.py#L161\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3135\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3135\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
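A sketch of the behavior requested in issue 3135 as a user-side wrapper, assuming a `datasets` version that exposes `get_dataset_config_names` at the top level; the `"default"` fallback mirrors the proposal:

```python
from datasets import get_dataset_config_names

def config_names(path: str) -> list:
    """Always return a non-empty list, falling back to a synthetic default name."""
    names = get_dataset_config_names(path)
    return names if names else ["default"]

# config_names("glue")           -> ["cola", "sst2", ...]
# config_names("Check/region_1") -> ["default"] under the proposed behavior
```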
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3134","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3134\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3134\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3134\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3134","id":1033251755,"node_id":"I_kwDODunzps49liur","number":3134,"title":"Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.11.0\/metrics\/rouge\/rouge.py","user":{"login":"yananchen1989","id":26405281,"node_id":"MDQ6VXNlcjI2NDA1Mjgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26405281?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yananchen1989","html_url":"https:\/\/github.com\/yananchen1989","followers_url":"https:\/\/api.github.com\/users\/yananchen1989\/followers","following_url":"https:\/\/api.github.com\/users\/yananchen1989\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yananchen1989\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yananchen1989\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yananchen1989\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yananchen1989\/orgs","repos_url":"https:\/\/api.github.com\/users\/yananchen1989\/repos","events_url":"https:\/\/api.github.com\/users\/yananchen1989\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yananchen1989\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi,\r\n\r\nDid you try to run the code multiple times (GitHub URLs can be down sometimes for various reasons)? I can access `https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.11.0\/metrics\/rouge\/rouge.py`, so this code is working without an error on my side. 
\r\n\r\nAdditionally, can you please run the `datasets-cli env` command because it seems to me that you are using the `datasets` version different from `1.12.1`?","Same issue when running `metric = datasets.load_metric(\"accuracy\")`.\r\nError info is:\r\n```\r\nmetric = datasets.load_metric(\"accuracy\")\r\nTraceback (most recent call last):\r\n\r\n File \"\", line 1, in \r\n metric = datasets.load_metric(\"accuracy\")\r\n\r\n File \"D:\\anaconda3\\lib\\site-packages\\datasets\\load.py\", line 610, in load_metric\r\n module_path, _ = prepare_module(\r\n\r\n File \"D:\\anaconda3\\lib\\site-packages\\datasets\\load.py\", line 330, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n\r\n File \"D:\\anaconda3\\lib\\site-packages\\datasets\\utils\\file_utils.py\", line 288, in cached_path\r\n output_path = get_from_cache(\r\n\r\n File \"D:\\anaconda3\\lib\\site-packages\\datasets\\utils\\file_utils.py\", line 605, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.11.0\/metrics\/accuracy\/accuracy.py\r\n```\r\n\r\n\r\n My `datasets-cli env` result is as follows:\r\n- `datasets` version: 1.11.0\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: 3.8.8\r\n- PyArrow version: 6.0.0\r\n\r\n@yananchen1989 did you find a way to solve this?","It seems to be able to solve this issue by adding the equivalent `accuracy.py` locally. \r\nchange `metric = datasets.load_metric(\"accuracy\")` to `metric = datasets.load_metric(path = \".\/accuracy.py\")`.\r\nCopy `accuracy.py` from browser at [accuracy.py](https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.11.0\/metrics\/accuracy\/accuracy.py)"],"created_at":1634886472000,"updated_at":1642600952000,"closed_at":1642600951000,"author_association":"NONE","active_lock_reason":null,"body":"datasets version: 1.12.1\r\n\r\n`metric = datasets.load_metric('rouge')`\r\n\r\nThe error:\r\n\r\n> ConnectionError Traceback (most recent call last)\r\n> in \r\n> ----> 1 metric = datasets.load_metric('rouge')\r\n> \r\n> \/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py in load_metric(path, config_name, process_id, num_process, cache_dir, experiment_id, keep_in_memory, download_config, download_mode, script_version, **metric_init_kwargs)\r\n> 613 download_config=download_config,\r\n> 614 download_mode=download_mode,\r\n> --> 615 dataset=False,\r\n> 616 )\r\n> 617 metric_cls = import_main_class(module_path, dataset=False)\r\n> \r\n> \/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, dynamic_modules_path, return_resolved_file_path, **download_kwargs)\r\n> 328 file_path = hf_github_url(path=path, name=name, dataset=dataset, version=script_version)\r\n> 329 try:\r\n> --> 330 local_path = cached_path(file_path, download_config=download_config)\r\n> 331 except FileNotFoundError:\r\n> 332 if script_version is not None:\r\n> \r\n> \/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/file_utils.py in cached_path(url_or_filename, download_config, **download_kwargs)\r\n> 296 use_etag=download_config.use_etag,\r\n> 297 max_retries=download_config.max_retries,\r\n> --> 298 use_auth_token=download_config.use_auth_token,\r\n> 299 )\r\n> 300 elif os.path.exists(url_or_filename):\r\n> \r\n> \/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/file_utils.py in 
get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, local_files_only, use_etag, max_retries, use_auth_token)\r\n> 603 raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\n> 604 _raise_if_offline_mode_is_enabled(f\"Tried to reach {url}\")\r\n> --> 605 raise ConnectionError(\"Couldn't reach {}\".format(url))\r\n> 606\r\n> 607 # Try a second time\r\n> \r\n> ConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.11.0\/metrics\/rouge\/rouge.py\r\n\r\n\r\nIs there any remedy to solve the connection issue ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3134\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3134\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
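The workaround settled on in the comments of issue 3134 above is to load the metric from a local copy of the script instead of fetching it from raw.githubusercontent.com at load time. A hedged sketch of that pattern (downloading once with the standard library; the local filename is arbitrary):

```python
import urllib.request

import datasets

# Fetch the metric script once; afterwards load_metric reads it from disk,
# so no request to raw.githubusercontent.com is made.
url = ("https://raw.githubusercontent.com/huggingface/datasets/"
       "1.11.0/metrics/accuracy/accuracy.py")
urllib.request.urlretrieve(url, "accuracy.py")

metric = datasets.load_metric(path="./accuracy.py")
print(metric.compute(references=[0, 1, 1], predictions=[0, 1, 0]))
```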
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3133","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3133\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3133\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3133\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3133","id":1032511710,"node_id":"PR_kwDODunzps4tftyZ","number":3133,"title":"Support Audio feature in streaming mode","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634823477000,"updated_at":1636726385000,"closed_at":1636726384000,"author_association":"MEMBER","active_lock_reason":null,"body":"Fix #3132.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3133\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3133\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3133","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3133","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3133.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3133.patch","merged_at":1636726384000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3132","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3132\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3132\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3132\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3132","id":1032505430,"node_id":"I_kwDODunzps49ishW","number":3132,"title":"Support Audio feature in streaming mode","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634823138000,"updated_at":1636726384000,"closed_at":1636726384000,"author_association":"MEMBER","active_lock_reason":null,"body":"Currently, Audio feature is only supported for non-streaming datasets.\r\n\r\nDue to the large size of many speech datasets, we should also support Audio feature in streaming mode.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3132\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3132\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3131","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3131\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3131\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3131\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3131","id":1032309865,"node_id":"I_kwDODunzps49h8xp","number":3131,"title":"Add ADE20k","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634811189000,"updated_at":1638964860000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Adding a Dataset\r\n- **Name:** ADE20k (actually it's called the MIT Scene Parsing Benchmark, it's actually a subset of ADE20k but a lot of authors still call it ADE20k)\r\n- **Description:** A semantic segmentation dataset, consisting of 150 classes.\r\n- **Paper:** http:\/\/people.csail.mit.edu\/bzhou\/publication\/scene-parse-camera-ready.pdf\r\n- **Data:** http:\/\/sceneparsing.csail.mit.edu\/\r\n- **Motivation:** I am currently adding Transformer-based semantic 
segmentation models that achieve SOTA on this dataset. It would be great to directly access this dataset using HuggingFace Datasets, in order to make example scripts in HuggingFace Transformers.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3131\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3131\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3130","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3130\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3130\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3130\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3130","id":1032299417,"node_id":"PR_kwDODunzps4tfBJU","number":3130,"title":"Create SECURITY.md","user":{"login":"zidingz","id":28839565,"node_id":"MDQ6VXNlcjI4ODM5NTY1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28839565?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zidingz","html_url":"https:\/\/github.com\/zidingz","followers_url":"https:\/\/api.github.com\/users\/zidingz\/followers","following_url":"https:\/\/api.github.com\/users\/zidingz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zidingz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zidingz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zidingz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zidingz\/orgs","repos_url":"https:\/\/api.github.com\/users\/zidingz\/repos","events_url":"https:\/\/api.github.com\/users\/zidingz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zidingz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi @zidingz, thanks for your contribution.\r\n\r\nHowever I am closing it because it is a duplicate of a previous PR:\r\n - #2958\r\n\r\n"],"created_at":1634810583000,"updated_at":1634826808000,"closed_at":1634826710000,"author_association":"NONE","active_lock_reason":null,"body":"To let the repository confirm feedback@huggingface.co as its security contact.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3130\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3130\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3130","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3130","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3130.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3130.patch","merged_at":null},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3129","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3129\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3129\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3129\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3129","id":1032234167,"node_id":"PR_kwDODunzps4tezlA","number":3129,"title":"Support Audio feature for TAR archives in sequential access","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Also do you think we can adapt `cast_column` to keep the same value for this new parameter when the user only wants to change the sampling rate ?","Thanks for your comments, @lhoestq, I will address them afterwards.\r\n\r\nBut, I think it is more important\/urgent first address the current blocking non-passing test: https:\/\/github.com\/huggingface\/datasets\/runs\/4143579241?check_suite_focus=true\r\n- I am thinking of a way of solving it, but if you have any hint, it will be more than welcome! \ud83d\ude05 \r\n\r\nBasically:\r\n```\r\n{'audio': '\/tmp\/pytest-of-runner\/pytest-0\/popen-gw1\/test_dataset_with_audio_featur1\/data\/test_audio_44100.wav'}\r\n``` \r\nbecomes\r\n```\r\n{'audio': {'bytes': None, 'path': '\/tmp\/pytest-of-runner\/pytest-0\/popen-gw1\/test_dataset_with_audio_featur1\/data\/test_audio_44100.wav'}}\r\n```\r\nafter a `map`, which is what was stored in the Arrow file. 
However we expect it remains invariant after this `map`.","@lhoestq, @mariosasko I finally proposed another implementation different from my last one:\r\n- Before: store Audio always as a struct, where bytes can be None\r\n- Now, depending on the examples, either store Audio as a struct (as before), or as a string.\r\n\r\nPlease note that the main motivation for this change was the issue mentioned above: https:\/\/github.com\/huggingface\/datasets\/pull\/3129#issuecomment-964347056\r\n","Until here we had the assumption that a Features object always has an associated, deterministic, pyarrow schema. This is useful to ensure that we are able to concatenate two datasets that have the same features for example.\r\n\r\nBy breaking this assumption for the Audio type, how can we ensure that we can concatenate two audio datasets if one has Audio as a struct and the other a string?","Oh I noticed that the Audio feature type has a private attribute `_storage_dtype`, so the assumption still holds, since they are now different feature types depending on this attribute :)\r\n(I mean different from the python equal operator point of view)","I think this PR is ready, @lhoestq, @mariosasko. ","Nit: We should also mention the new storage structure in the `Features` docstring [here](https:\/\/github.com\/huggingface\/datasets\/blob\/b29fb550c31de337b952035a7584147e0f18c0cf\/src\/datasets\/features\/features.py#L966) for users to know what type of value to return in their dataset scripts (we also have a link to that docstring in the `ADD_NEW_DATASET` template)."],"created_at":1634806611000,"updated_at":1637170928000,"closed_at":1637170927000,"author_association":"MEMBER","active_lock_reason":null,"body":"Add Audio feature support for TAR archived files in sequential access.\r\n\r\nFix #3128.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3129\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3129\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3129","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3129","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3129.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3129.patch","merged_at":1637170927000},"is_pull_request":true}
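The review comments on PR 3129 above mention adapting `cast_column` so that only the sampling rate changes while the stored values persist. A minimal sketch of that pattern, with an illustrative dataset name:

```python
from datasets import Audio, load_dataset

# Resampling through cast_column: the feature type is swapped for an Audio
# feature with a new sampling rate, and decoding resamples on access.
dataset = load_dataset("common_voice", "tr", split="train")
dataset = dataset.cast_column("audio", Audio(sampling_rate=16_000))
print(dataset.features["audio"].sampling_rate)  # 16000
```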
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3128","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3128\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3128\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3128\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3128","id":1032201870,"node_id":"I_kwDODunzps49hiaO","number":3128,"title":"Support Audio feature for TAR archives in sequential access","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634804581000,"updated_at":1637170927000,"closed_at":1637170927000,"author_association":"MEMBER","active_lock_reason":null,"body":"Currently, Audio feature accesses each audio file by their file path.\r\n\r\nHowever, streamed TAR archive files do not allow random access to their archived files.\r\n\r\nTherefore, we should enhance the Audio feature to support TAR archived files in sequential 
access.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3128\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3128\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3127","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3127\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3127\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3127\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3127","id":1032100613,"node_id":"I_kwDODunzps49hJsF","number":3127,"title":"datasets-cli: convertion of a tfds dataset to a huggingface one.","user":{"login":"vitalyshalumov","id":33824221,"node_id":"MDQ6VXNlcjMzODI0MjIx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33824221?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vitalyshalumov","html_url":"https:\/\/github.com\/vitalyshalumov","followers_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/followers","following_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/orgs","repos_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/repos","events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi,\r\n\r\nthe MNIST dataset is already available on the Hub. You can use it as follows:\r\n```python\r\nimport datasets\r\ndataset_dict = datasets.load_dataset(\"mnist\")\r\n```\r\n\r\nAs for the conversion of TFDS datasets to HF datasets, we will be working on it in the coming months, so stay tuned."],"created_at":1634796867000,"updated_at":1635334565000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"### Discussed in https:\/\/github.com\/huggingface\/datasets\/discussions\/3079\r\n\r\n\r\n\r\nOriginally posted by **vitalyshalumov** October 14, 2021<\/sup>\r\nI'm trying to convert a tfds dataset to a huggingface one.\r\n\r\nI've tried:\r\n\r\n1. datasets-cli convert --tfds_path ~\/tensorflow_datasets\/mnist\/3.0.1\/ --datasets_directory ~\/.cache\/huggingface\/datasets\/mnist\/3.0.1\/\r\n\r\n2. 
datasets-cli convert --tfds_path ~\/tensorflow_datasets\/mnist\/3.0.1\/ --datasets_directory ~\/.cache\/huggingface\/datasets\/\r\n\r\n\r\nand other permutations.\r\nThe script appears to be running and finishing without an error, but when looking in the huggingface\/datasets\/ folder nothing is created.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3127\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3127\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
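The maintainer comment in the record above suggests bypassing the TFDS conversion entirely and loading MNIST straight from the Hub. A runnable version of that suggestion:

```python
import datasets

# MNIST is already hosted on the Hub, so no datasets-cli conversion is needed.
dataset_dict = datasets.load_dataset("mnist")
print(dataset_dict)                      # DatasetDict with "train" and "test"
print(dataset_dict["train"][0]["label"])
```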
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3126","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3126\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3126\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3126\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3126","id":1032093055,"node_id":"I_kwDODunzps49hH1_","number":3126,"title":"\"arabic_billion_words\" dataset does not create the full dataset","user":{"login":"vitalyshalumov","id":33824221,"node_id":"MDQ6VXNlcjMzODI0MjIx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33824221?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vitalyshalumov","html_url":"https:\/\/github.com\/vitalyshalumov","followers_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/followers","following_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/orgs","repos_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/repos","events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Thanks for reporting, @vitalyshalumov.\r\n\r\nApparently the script to parse the data has a bug, and does not generate the entire dataset.\r\n\r\nI'm fixing it."],"created_at":1634796158000,"updated_at":1634909320000,"closed_at":1634909320000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nWhen running: \r\nraw_dataset = load_dataset('arabic_billion_words','Alittihad')\r\nthe correct dataset file is pulled from the url.\r\nBut, the generated dataset includes just a small portion of the data included in the file.\r\nThis is true for all other portions of the \"arabic_billion_words\" dataset ('Almasryalyoum',.....)\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nraw_dataset = load_dataset('arabic_billion_words','Alittihad')\r\n\r\n#The screen message\r\nDownloading 
and preparing dataset arabic_billion_words\/Alittihad (download: 332.13 MiB, generated: 20.62 MiB, post-processed: Unknown size, total: 352.74 MiB) \r\n```\r\n\r\n## Expected results\r\nover 100K sentences\r\n\r\n## Actual results\r\nonly 11K sentences\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.14.0\r\n- Platform: Linux-5.8.0-63-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyArrow version: 4.0.1\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3126\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3126\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
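Once the parsing fix mentioned in the comments of issue 3126 above is released, forcing a re-download should regenerate the full split. A sketch of the sanity check, assuming the installed `datasets` version accepts the string form of `download_mode`:

```python
from datasets import load_dataset

# Force regeneration from the source file; the row count is the quickest
# check against the ~100K sentences expected in the report above.
raw_dataset = load_dataset(
    "arabic_billion_words", "Alittihad", download_mode="force_redownload"
)
print(raw_dataset["train"].num_rows)
```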
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3125","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3125\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3125\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3125\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3125","id":1032046666,"node_id":"PR_kwDODunzps4teNPC","number":3125,"title":"Add SLR83 to OpenSLR","user":{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634790360000,"updated_at":1634933405000,"closed_at":1634891422000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"The PR resolves #3119, adding SLR83 (UK and Ireland dialects) to the previously created OpenSLR dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3125\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3125\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3125","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3125","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3125.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3125.patch","merged_at":1634891422000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3124","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3124\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3124\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3124\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3124","id":1031976286,"node_id":"PR_kwDODunzps4td-5w","number":3124,"title":"More efficient nested features encoding","user":{"login":"eladsegal","id":13485709,"node_id":"MDQ6VXNlcjEzNDg1NzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13485709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eladsegal","html_url":"https:\/\/github.com\/eladsegal","followers_url":"https:\/\/api.github.com\/users\/eladsegal\/followers","following_url":"https:\/\/api.github.com\/users\/eladsegal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eladsegal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eladsegal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eladsegal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eladsegal\/orgs","repos_url":"https:\/\/api.github.com\/users\/eladsegal\/repos","events_url":"https:\/\/api.github.com\/users\/eladsegal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eladsegal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["@lhoestq @albertvillanova @mariosasko\r\nCan you please check this out?","Thanks, done!"],"created_at":1634781331000,"updated_at":1635865633000,"closed_at":1635851044000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Nested encoding of features wastes a lot of time on operations which are effectively doing nothing when lists are used.\r\nFor example, if in the input we have a list of integers, `encoded_nested_example` will iterate over it and apply `encoded_nested_example` on every element even though it just return the int as is.\r\n\r\nA similar issue is handled at an earlier stage when casting pytorch\/tensorflow\/pandas objects to python lists\/numpy arrays:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/c98c23c4260edadab00f997d1a5d66b7f2e93ce9\/src\/datasets\/features\/features.py#L149-L156\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/c98c23c4260edadab00f997d1a5d66b7f2e93ce9\/src\/datasets\/features\/features.py#L212-L228\r\n\r\nIn this pull request I suggest to use the same approach in 
`encoded_nested_example`.\r\nIn my setup there was a major speedup with this change: loading the data was at least x4 faster. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3124\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3124\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3124","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3124","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3124.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3124.patch","merged_at":1635851044000},"is_pull_request":true}
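A hedged sketch of the fast path described in the PR body above, not the actual implementation in `datasets.features`: when the sub-schema of a list is a plain value type, every element encodes to itself, so the per-element recursion can be skipped.

```python
# Sketch only: schemas are modeled as nested Python lists/dicts, with any
# other value standing in for a plain (leaf) feature type.
def encode_nested_example_sketch(schema, obj):
    if isinstance(schema, list):
        if obj is None:
            return None
        sub_schema = schema[0]
        if not isinstance(sub_schema, (list, dict)):
            # Fast path: e.g. a list of ints is returned as is, instead of
            # encoding each element individually to itself.
            return list(obj)
        return [encode_nested_example_sketch(sub_schema, o) for o in obj]
    return obj  # plain values pass through unchanged
```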
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3123","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3123\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3123\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3123\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3123","id":1031793207,"node_id":"I_kwDODunzps49f-o3","number":3123,"title":"Segmentation fault when loading datasets from file","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! I created an issue on Arrow's JIRA after making a minimum reproducible example\r\n\r\nhttps:\/\/issues.apache.org\/jira\/browse\/ARROW-14439\r\n\r\n```python\r\nimport io\r\n\r\nimport pyarrow.json as paj\r\n\r\nbatch = b'{\"a\": [], \"b\": 1}\\n{\"b\": 1}'\r\nblock_size = 12\r\n\r\npaj.read_json(\r\n io.BytesIO(batch), read_options=paj.ReadOptions(block_size=block_size)\r\n)\r\n```\r\n\r\nI don't see a way to workaround this properly now without hurting the performance of the JSON loader significantly though","The issue has been fixed in pyarrow 6.0.0, please update pyarrow :)\r\n\r\nThe issue was due to missing fields in the JSON data of type list. 
Now it's working fine and missing list fields are replaced with empty lists"],"created_at":1634760971000,"updated_at":1635865027000,"closed_at":1635865027000,"author_association":"MEMBER","active_lock_reason":null,"body":"## Describe the bug\r\nCustom dataset loading sometimes segfaults and kills the process if chunks contain a variety of features\/\r\n\r\n## Steps to reproduce the bug\r\n\r\nDownload an example file:\r\n```\r\nwget https:\/\/gist.githubusercontent.com\/TevenLeScao\/11e2184394b3fa47d693de2550942c6b\/raw\/4232704d08fbfcaf93e5b51def9e5051507651ad\/tiny_kelm.jsonl\r\n```\r\nThen in Python:\r\n```\r\nimport datasets\r\ntiny_kelm = datasets.load_dataset(\"json\", data_files=\"tiny_kelm.jsonl\", chunksize=100000)\r\n```\r\n\r\n## Expected results\r\na `tiny_kelm` functional dataset\r\n\r\n## Actual results\r\n\u2620\ufe0f `Segmentation fault (core dumped)` \u2620\ufe0f\r\n\r\n## Environment info\r\n- `datasets` version: 1.14.0\r\n- Platform: Linux-5.11.0-38-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyArrow version: 5.0.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3123\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3123\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
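As context for the resolution of the segfault issue above: a minimal sketch of the ARROW-14439 repro under the fixed behavior, assuming pyarrow >= 6.0.0 is installed. The payload and `block_size` come straight from the comment thread; the expected result follows the maintainer's description rather than output reproduced here.

```python
import io

import pyarrow.json as paj

# Same payload as the ARROW-14439 repro: the second row is missing the
# list-typed field "a", and the small block_size forces the two rows
# into separate parse blocks.
batch = b'{"a": [], "b": 1}\n{"b": 1}'

table = paj.read_json(
    io.BytesIO(batch), read_options=paj.ReadOptions(block_size=12)
)

# Per the fix described above, the missing list field comes back as an
# empty list instead of crashing the process.
print(table.to_pydict())
```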
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3122","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3122\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3122\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3122\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3122","id":1031787509,"node_id":"I_kwDODunzps49f9P1","number":3122,"title":"OSError with a custom dataset loading script","user":{"login":"suzanab","id":38602977,"node_id":"MDQ6VXNlcjM4NjAyOTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38602977?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/suzanab","html_url":"https:\/\/github.com\/suzanab","followers_url":"https:\/\/api.github.com\/users\/suzanab\/followers","following_url":"https:\/\/api.github.com\/users\/suzanab\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/suzanab\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/suzanab\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/suzanab\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/suzanab\/orgs","repos_url":"https:\/\/api.github.com\/users\/suzanab\/repos","events_url":"https:\/\/api.github.com\/users\/suzanab\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/suzanab\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi,\r\n\r\nthere is a difference in how the `data_dir` is zipped between the `classla\/janes_tag` and the `classla\/reldi_hr` dataset. After unzipping, for the former, the data files (`*.conllup`) are in the root directory (root -> data files), and for the latter, they are inside the `data` directory (root -> `data` -> data files).\r\n\r\nThis can be fixed by removing the `os.path.join` call in https:\/\/huggingface.co\/datasets\/classla\/janes_tag\/blob\/main\/janes_tag.py#L86\r\n\r\nLet me know if this works for you.","Hi Mario,\r\n\r\nI had already tried that before, but it didn't work. I have now recreated the `classla\/janes_tag` zip file so that it also contains the `data` directory, but I am still getting the same error.","Hi,\r\n\r\nI just tried to download the `classla\/janes_tag` dataset, and this time the zip file is extracted correctly. 
However, the script is now throwing the IndexError, probably due to a bug in the `_generate_examples`.\r\n\r\nLet me know if you are still getting the same error.","I am still getting the same error.","Hi, \r\n\r\ncould you try to download the dataset with a different `cache_dir` like so:\r\n```python\r\nimport datasets\r\ndataset = datasets.load_dataset('classla\/janes_tag', split='validation', cache_dir=\"path\/to\/different\/cache\/dir\")\r\n```\r\nIf this works, then most likely the cached extracted data is causing issues. This data is stored at `~\/.cache\/huggingface\/datasets\/downloads\/extracted` and needs to be deleted, and then it should work (you can easily locate the directory with the path given in the `OSError` message). Additionally, I'd suggest you to update `datasets` to the newest version with:\r\n```\r\npip install -U datasets\r\n```","Thank you, deleting the `~\/.cache\/huggingface\/datasets\/downloads\/extracted` directory helped. However, I am still having problems.\r\n\r\nThere was indeed a bug in the script that was throwing an `IndexError`, which I have now corrected (added the condition to skip the lines starting with '# text') and it is working locally, but still throws an error when I try to load the dataset from HuggingFace. I literally copied and pasted the `_generate_examples` function and ran it on the `dev_all.conllup` file, which I even re-downloaded from the repository to be certain that the files are exactly the same. I also deleted everything again just in case, but it didn't help. The code works locally, but throws an `IndexError` when loading from `datasets.`","Hi,\r\n\r\nDid some investigation.\r\n\r\nTo fix the dataset script on the Hub, append the following labels to the `names` list of the `upos_tags` field:\r\n```'INTJ NOUN', 'AUX PRON', 'PART ADV', 'PRON ADP', 'INTJ INTJ', 'VERB NOUN', 'NOUN AUX'```.\r\n\r\nThis step is required to avoid an error due to missing labels in the following step which is:\r\n```python\r\nload_dataset(\"classla\/janes_tag\", split=\"validation\", download_mode=\"force_redownload\")\r\n```\r\nThis will generate and cache the dataset, so specifying `download_mode` will not be required anymore unless you update the script\/data on the Hub.","It works now, thank you!"],"created_at":1634760519000,"updated_at":1637661338000,"closed_at":1637661338000,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nI am getting an OS error when trying to load the newly uploaded dataset classla\/janes_tag. What puzzles me is that I have already uploaded a very similar dataset - classla\/reldi_hr - with no issues. 
The loading scripts for the two datasets are almost identical and they have the same directory structure, yet I am only getting an error with janes_tag.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndataset = datasets.load_dataset('classla\/janes_tag', split='validation')\r\n```\r\n\r\n## Expected results\r\nDataset correctly loaded.\r\n\r\n## Actual results\r\n\r\nTraceback (most recent call last):\r\n File \"C:\/mypath\/test.py\", line 91, in \r\n load_and_print('janes_tag')\r\n File \"C:\/mypath\/test.py\", line 32, in load_and_print\r\n dataset = datasets.load_dataset('classla\/{}'.format(ds_name), split='validation')\r\n File \"C:\\mypath\\venv\\lib\\site-packages\\datasets\\load.py\", line 1632, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"C:\\mypath\\venv\\lib\\site-packages\\datasets\\builder.py\", line 608, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"C:\\mypath\\venv\\lib\\site-packages\\datasets\\builder.py\", line 704, in _download_and_prepare\r\n ) from None\r\nOSError: Cannot find data file. \r\nOriginal error:\r\n[Errno 2] No such file or directory: 'C:\\\\mypath\\\\.cache\\\\huggingface\\\\datasets\\\\downloads\\\\2c9996e44bdc5af9c89bffb9e6d7a3e42fdb2f56bacab45de13b20f3032ea7ca\\\\data\\\\train_all.conllup'\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.14.0\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: 3.7.5\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3122\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3122\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
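The debugging steps from the thread above, collected into one sketch: first rule out a stale extracted archive with a fresh cache directory, then regenerate the cached dataset once the script on the Hub is fixed. The `cache_dir` value is a placeholder, not a path from the thread.

```python
import datasets

# Step 1: load into a fresh cache to rule out stale extracted data
# (otherwise delete ~/.cache/huggingface/datasets/downloads/extracted).
ds = datasets.load_dataset(
    "classla/janes_tag",
    split="validation",
    cache_dir="path/to/fresh/cache",
)

# Step 2: after updating the script on the Hub, regenerate the cached
# dataset once; later calls can drop download_mode again.
ds = datasets.load_dataset(
    "classla/janes_tag",
    split="validation",
    download_mode="force_redownload",
)
```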
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3121","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3121\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3121\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3121\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3121","id":1031673115,"node_id":"PR_kwDODunzps4tc_6q","number":3121,"title":"Use huggingface_hub.HfApi to list datasets\/metrics","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634752109000,"updated_at":1636112708000,"closed_at":1636105716000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"Delete `datasets.inspect.HfApi` and use `huggingface_hub.HfApi` instead.\r\n\r\nWIP until https:\/\/github.com\/huggingface\/huggingface_hub\/pull\/429 is merged, then wait for the new release of `huggingface_hub`, update the `huggingface_hub` version in `setup.py` and merge this PR.\r\n\r\ncc: @lhoestq 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3121\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3121\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3121","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3121","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3121.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3121.patch","merged_at":1636105715000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3120","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3120\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3120\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3120\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3120","id":1031574511,"node_id":"PR_kwDODunzps4tcril","number":3120,"title":"Correctly update metadata to preserve features when concatenating datasets with axis=1","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634745298000,"updated_at":1634891331000,"closed_at":1634827821000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"This PR correctly updates metadata to preserve higher-level feature types (e.g. `ClassLabel`) in `datasets.concatenate_datasets` when `axis=1`. Previously, we would delete the feature metadata in `datasets.concatenate_datasets` if `axis=1` and restore the feature types from the arrow table schema in `Dataset.__init__`. However, this approach only works for simple feature types (e.g. 
`Value`).\r\n\r\nFixes #3111","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3120\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3120\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3120","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3120","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3120.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3120.patch","merged_at":1634827821000},"is_pull_request":true}
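A minimal sketch of the behavior the PR above fixes, assuming a `datasets` release that includes it: a higher-level feature type such as `ClassLabel` should survive a column-wise concatenation rather than degrade to a plain integer `Value`.

```python
from datasets import ClassLabel, Dataset, Features, Value, concatenate_datasets

ds1 = Dataset.from_dict(
    {"label": [0, 1]},
    features=Features({"label": ClassLabel(names=["neg", "pos"])}),
)
ds2 = Dataset.from_dict(
    {"text": ["a", "b"]},
    features=Features({"text": Value("string")}),
)

# With the metadata fix, the ClassLabel feature is preserved through the
# column-wise (axis=1) concatenation.
combined = concatenate_datasets([ds1, ds2], axis=1)
print(combined.features["label"])
```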
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3119","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3119\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3119\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3119\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3119","id":1031328044,"node_id":"I_kwDODunzps49eNEs","number":3119,"title":"Add OpenSLR 83 - Crowdsourced high-quality UK and Ireland English Dialect speech","user":{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"closed","locked":false,"assignee":{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false},"assignees":[{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Ugh. The index files for SLR83 are CSV, not TSV. 
I need to add logic to process these index files."],"created_at":1634731507000,"updated_at":1634929252000,"closed_at":1634891422000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Adding a Dataset\r\n- **Name:** *openslr*\r\n- **Description:** *Dataset of male and female recordings of English from various dialects of the UK and Ireland.*\r\n- **Paper:** *https:\/\/www.openslr.org\/resources\/83\/about.html*\r\n- **Data:** *Eleven separate data files can be found via https:\/\/www.openslr.org\/resources\/83\/*\r\n- **Motivation:** *Increase English ASR data with UK and Irish dialects*\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nThe *openslr* dataset already exists; this will add an additional subset, *SLR83*.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3119\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3119\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
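On the CSV-versus-TSV wrinkle noted in the comment above, a hypothetical helper showing the kind of branching a loader would need: SLR83 ships comma-separated index files while the other OpenSLR subsets use tab-separated ones. The function and parameter names are illustrative, not taken from the actual dataset script.

```python
import csv

def iter_index_rows(path, subset):
    # SLR83's index files are comma-separated; the other OpenSLR
    # subsets ship tab-separated indexes, so pick the delimiter per
    # subset before parsing.
    delimiter = "," if subset == "SLR83" else "\t"
    with open(path, encoding="utf-8") as f:
        yield from csv.reader(f, delimiter=delimiter)
```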
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3118","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3118\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3118\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3118\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3118","id":1031309549,"node_id":"PR_kwDODunzps4tb0LY","number":3118,"title":"Fix CI error at each release commit","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634730278000,"updated_at":1634734956000,"closed_at":1634734956000,"author_association":"MEMBER","active_lock_reason":null,"body":"Fix test_load_dataset_canonical at release commit.\r\n\r\nFix #3117.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3118\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3118\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3118","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3118","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3118.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3118.patch","merged_at":1634734955000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3117","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3117\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3117\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3117\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3117","id":1031308083,"node_id":"I_kwDODunzps49eIMz","number":3117,"title":"CI error at each release commit","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634730173000,"updated_at":1634734955000,"closed_at":1634734955000,"author_association":"MEMBER","active_lock_reason":null,"body":"After 1.12.0, there is a recurrent CI error at each release commit: https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/8289\/workflows\/665d954d-e409-4602-8202-e678594d2946\/jobs\/51110\r\n\r\n```\r\n____________________ LoadTest.test_load_dataset_canonical _____________________\r\n[gw0] win32 -- Python 3.6.8 C:\\tools\\miniconda3\\python.exe\r\n\r\nself = \r\n\r\n def test_load_dataset_canonical(self):\r\n scripts_version = os.getenv(\"HF_SCRIPTS_VERSION\", SCRIPTS_VERSION)\r\n with self.assertRaises(FileNotFoundError) as context:\r\n datasets.load_dataset(\"_dummy\")\r\n self.assertIn(\r\n 
f\"https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/{scripts_version}\/datasets\/_dummy\/_dummy.py\",\r\n> str(context.exception),\r\n )\r\nE AssertionError: 'https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.14.0\/datasets\/_dummy\/_dummy.py' not found in \"Couldn't find a dataset script at C:\\\\Users\\\\circleci\\\\datasets\\\\_dummy\\\\_dummy.py or any data file in the same directory. Couldn't find '_dummy' on the Hugging Face Hub either: FileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/_dummy\/_dummy.py\"\r\n\r\ntests\\test_load.py:358: AssertionError\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3117\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3117\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3116","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3116\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3116\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3116\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3116","id":1031270611,"node_id":"PR_kwDODunzps4tbr6g","number":3116,"title":"Update doc links to point to new docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634727647000,"updated_at":1634891368000,"closed_at":1634891205000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"This PR:\r\n* updates the README links and the ADD_NEW_DATASET template to point to the new docs (the new docs don't have a section with the list of all the possible features, so I added that info to the `Features` docstring, which is then referenced in the ADD_NEW_DATASET template)\r\n* fixes some broken links in the `.rst` files (fixed with the `make linkcheck` 
tool)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3116\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3116\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3116","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3116","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3116.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3116.patch","merged_at":1634891205000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3115","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3115\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3115\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3115\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3115","id":1030737524,"node_id":"PR_kwDODunzps4tZ-Vr","number":3115,"title":"Fill in dataset card for NCBI disease dataset","user":{"login":"edugp","id":17855740,"node_id":"MDQ6VXNlcjE3ODU1NzQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17855740?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/edugp","html_url":"https:\/\/github.com\/edugp","followers_url":"https:\/\/api.github.com\/users\/edugp\/followers","following_url":"https:\/\/api.github.com\/users\/edugp\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/edugp\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/edugp\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/edugp\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/edugp\/orgs","repos_url":"https:\/\/api.github.com\/users\/edugp\/repos","events_url":"https:\/\/api.github.com\/users\/edugp\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/edugp\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634677025000,"updated_at":1634891107000,"closed_at":1634891107000,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3115\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3115\/timeline","performed_via_github_app":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3115","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3115","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3115.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3115.patch","merged_at":1634891107000},"is_pull_request":true}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3114","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3114\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3114\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3114\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3114","id":1030693130,"node_id":"I_kwDODunzps49byEK","number":3114,"title":"load_from_disk in DatasetsDict\/Dataset not working with PyArrowHDFS wrapper implementing fsspec.spec.AbstractFileSystem","user":{"login":"francisco-perez-sorrosal","id":918006,"node_id":"MDQ6VXNlcjkxODAwNg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/918006?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal","html_url":"https:\/\/github.com\/francisco-perez-sorrosal","followers_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/followers","following_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/orgs","repos_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/repos","events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["Hi ! Can you try again with pyarrow 6.0.0 ? I think it includes some changes regarding filesystems compatibility with fsspec.","Hi @lhoestq! 
I ended up using `fsspec.implementations.arrow.HadoopFileSystem` which doesn't have the problem I described with pyarrow 5.0.0.\r\n\r\nI'll try again with `PyArrowHDFS` once I update arrow to 6.0.0.\r\n\r\nThanks!"],"created_at":1634673705000,"updated_at":1637282123000,"closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"body":"## Describe the bug\r\nPassing a PyArrowHDFS implementation of fsspec.spec.AbstractFileSystem (in the `fs` param required by `load_from_disk` methods in `DatasetDict` (in datasets_dict.py) and `Dataset` (in arrow_dataset.py) results in an error when calling the download method in the `fs` parameter.\r\n\r\n\r\n## Steps to reproduce the bug\r\n\r\nThe documentation for the `fs` parameter states:\r\n\r\n```\r\nfs (:class:`~filesystems.S3FileSystem` or ``fsspec.spec.AbstractFileSystem``, optional, default ``None``):\r\n Instance of the remote filesystem used to download the files from.\r\n```\r\n\r\n`PyArrowHDFS` from [fsspec](https:\/\/filesystem-spec.readthedocs.io\/en\/latest\/_modules\/fsspec\/implementations\/hdfs.html) implements `fsspec.spec.AbstractFileSystem`. However, when using it as shown below, I get an error.\r\n\r\n```python\r\nfrom fsspec.implementations.hdfs import PyArrowHDFS\r\n...\r\ntransformed_corpus_path = \"\/user\/my_user\/clickbait\/transformed_ds\/\"\r\nfs = PyArrowHDFS(host, port, user, kerb_ticket=kerb_ticket)\r\ndss = DatasetDict.load_from_disk(transformed_corpus_path, fs, True)\r\n```\r\n\r\n## Expected results\r\n\r\nPrevious to load from disk, I have managed to successfully store in HDFS the data and meta-information of a DatasetDict by doing:\r\n```python\r\ntransformed_corpus_path = \"\/user\/my_user\/clickbait\/transformed_ds\/\"\r\nfs = PyArrowHDFS(host, port, user, kerb_ticket=kerb_ticket)\r\nmy_datasets.save_to_disk(transformed_corpus_path, fs=fs)\r\n```\r\n\r\nAs I have 3 datasets in the DatasetDict named `my_datasets`, the previous Python code creates the following contents in HDFS:\r\n\r\n```sh\r\n$ hadoop fs -ls \"\/user\/my_user\/clickbait\/transformed_ds\/\"\r\nFound 4 items\r\n-rw------- 3 my_user users 43 2021-10-19 03:08 \/user\/my_user\/clickbait\/transformed_ds\/dataset_dict.json\r\ndrwx------ - my_user users 0 2021-10-19 03:08 \/user\/my_user\/clickbait\/transformed_ds\/test\r\ndrwx------ - my_user users 0 2021-10-19 03:08 \/user\/my_user\/clickbait\/transformed_ds\/train\r\ndrwx------ - my_user users 0 2021-10-19 03:08 \/user\/my_user\/clickbait\/transformed_ds\/validation\r\n```\r\n\r\nI would expect to recover on `dss` the Arrow-backed datasets I previously saved in HDFS calling the `save_to_disk` method on the `DatasetDict` object when invoking `DatasetDict.load_from_disk(...)` as described above. 
\r\n\r\n## Actual results\r\n\r\nHowever, when trying to recover the saved datasets, I get this error:\r\n\r\n```\r\n...\r\n File \"\/home\/fperez\/dev\/neuromancer\/neuromancer\/corpus.py\", line 186, in load_transformed_corpus_from_disk\r\n dss = DatasetDict.load_from_disk(transformed_corpus_path, fs, True)\r\n File \"\/home\/fperez\/anaconda3\/envs\/neuromancer\/lib\/python3.9\/site-packages\/datasets\/dataset_dict.py\", line 748, in load_from_disk\r\n dataset_dict[k] = Dataset.load_from_disk(dataset_dict_split_path, fs, keep_in_memory=keep_in_memory)\r\n File \"\/home\/fperez\/anaconda3\/envs\/neuromancer\/lib\/python3.9\/site-packages\/datasets\/arrow_dataset.py\", line 1048, in load_from_disk\r\n fs.download(src_dataset_path, dataset_path.as_posix(), recursive=True)\r\n File \"pyarrow\/_hdfsio.pyx\", line 438, in pyarrow._hdfsio.HadoopFileSystem.download\r\nTypeError: download() got an unexpected keyword argument 'recursive'\r\n```\r\n\r\nExamining the [signature of the download method in pyarrow 5.0.0](https:\/\/github.com\/apache\/arrow\/blob\/54d2bd89c99df72fa091b025452f85dd5d88e3cf\/python\/pyarrow\/_hdfsio.pyx#L438) we can see that there's no download parameter:\r\n\r\n```python\r\n def download(self, path, stream, buffer_size=None):\r\n with self.open(path, 'rb') as f:\r\n f.download(stream, buffer_size=buffer_size)\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.13.3\r\n- Platform: Linux-3.10.0-1160.15.2.el7.x86_64-x86_64-with-glibc2.33\r\n- Python version: 3.9.7\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3114\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3114\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3113","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3113\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3113\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3113\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3113","id":1030667547,"node_id":"I_kwDODunzps49br0b","number":3113,"title":"Loading Data from HDF files","user":{"login":"FeryET","id":30388648,"node_id":"MDQ6VXNlcjMwMzg4NjQ4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30388648?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/FeryET","html_url":"https:\/\/github.com\/FeryET","followers_url":"https:\/\/api.github.com\/users\/FeryET\/followers","following_url":"https:\/\/api.github.com\/users\/FeryET\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/FeryET\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/FeryET\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/FeryET\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/FeryET\/orgs","repos_url":"https:\/\/api.github.com\/users\/FeryET\/repos","events_url":"https:\/\/api.github.com\/users\/FeryET\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/FeryET\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":[],"created_at":1634671606000,"updated_at":1634672568000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nMore often than not I come along big HDF datasets, and currently there is no straight forward way to feed them to a dataset.\r\n\r\n**Describe the solution you'd like**\r\nI would love to see a `from_h5` method that gets an interface implemented by the user on how items are extracted from dataset (in case of multiple datasets containing elements like arrays and metadata and etc).\r\n\r\n**Describe alternatives you've considered**\r\nCurrently I manually load hdf files using `h5py` and implement PyTorch dataset interface. 
For small h5 files I load them into a pandas dataframe and use the `from_pandas` function in the `datasets` package to load them, but for big datasets this is not feasible.\r\n\r\n**Additional context**\r\nHDF files are widespread throughout different domains and are one of the go-tos for many researchers\/scientists\/engineers who work with numerical data. Given that `datasets`' use cases have outgrown NLP, it would make a lot of sense to focus on things like supporting HDF files.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3113\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3113\/timeline","performed_via_github_app":null,"draft":null,"pull_request":{"url":"","html_url":"","diff_url":"","patch_url":"","merged_at":0},"is_pull_request":false}
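Pending a dedicated `from_h5` API like the one requested above, one way to get a large HDF5 file into a `Dataset` without materializing it in memory is to stream rows through `Dataset.from_generator`, available in later `datasets` releases. The file name and the "X"/"y" dataset keys are assumed for illustration.

```python
import h5py
from datasets import Dataset

def gen():
    # Iterate the HDF5 datasets row by row so only one row is ever
    # resident in memory at a time.
    with h5py.File("features.h5", "r") as f:
        for x, y in zip(f["X"], f["y"]):
            yield {"x": x.tolist(), "y": int(y)}

ds = Dataset.from_generator(gen)
```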
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3112","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3112\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3112\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3112\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3112","id":1030613083,"node_id":"I_kwDODunzps49behb","number":3112,"title":"OverflowError: There was an overflow in the . Try to reduce writer_batch_size to have batches smaller than 2GB","user":{"login":"BenoitDalFerro","id":69694610,"node_id":"MDQ6VXNlcjY5Njk0NjEw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/69694610?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BenoitDalFerro","html_url":"https:\/\/github.com\/BenoitDalFerro","followers_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/followers","following_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/orgs","repos_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/repos","events_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"assignees":[],"milestone":{"url":"","html_url":"","labels_url":"","id":0,"node_id":"","number":0,"title":"","description":"","creator":{"login":"","id":0,"node_id":"","avatar_url":"","gravatar_id":"","url":"","html_url":"","followers_url":"","following_url":"","gists_url":"","starred_url":"","subscriptions_url":"","organizations_url":"","repos_url":"","events_url":"","received_events_url":"","type":"","site_admin":false},"open_issues":0,"closed_issues":0,"state":"","created_at":0,"updated_at":0,"due_on":0,"closed_at":null},"comments":["I am very unsure on why you tagged me here. 
I am not a maintainer of the Datasets library and have no idea how to help you.","fixed","OK, got it: the tensor was full of NaNs, cf.\r\n\r\n~\\anaconda3\\envs\\xxx\\lib\\site-packages\\datasets\\arrow_writer.py in write_examples_on_file(self)\r\n315 # This check fails with FloatArrays with nans, which is not what we want, so account for that:","Actually this is a live bug, documented yet still live, so reopening"],"created_at":1634667701000,"updated_at":1634669549000,"closed_at":null,"author_association":"NONE","active_lock_reason":null,"body":"## Describe the bug\r\nDespite batches being way under 2GB when running `datasets.map()`, and after processing the data of the first batch correctly and without fuss, it returns the following error irrespective of writer_batch_size (2, 4, 8, 16, 32, 64 and 128 in my case):\r\n\r\n> OverflowError: There was an overflow in the . Try to reduce writer_batch_size to have batches smaller than 2GB\r\n\r\nNote that I always set `batch_size=writer_batch_size`:\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndatasets.map(lambda example: {\"column_name\": function(arguments)}, batched=False, remove_columns=datasets.column_names, batch_size=batch_size, writer_batch_size=batch_size, disable_nullable=True, num_proc=None, desc=\"blablabla\")\r\n```\r\n## Introspecting CUDA memory during the bug\r\n\r\nI placed the following statement within `function(arguments)` to introspect memory usage; it reports merely a little over 1\/4 of 2GB:\r\n`print(torch.cuda.memory_summary(device=device, abbreviated=False))`\r\n\r\n> |===========================================================================|\r\n | PyTorch CUDA memory summary, device ID 0 |\r\n |---------------------------------------------------------------------------|\r\n | CUDA OOMs: 0 | cudaMalloc retries: 0 |\r\n |===========================================================================|\r\n | Metric | Cur Usage | Peak Usage | Tot Alloc | Tot Freed |\r\n |---------------------------------------------------------------------------|\r\n | Allocated memory | 541418 KB | 545725 KB | 555695 KB | 14276 KB |\r\n | from large pool | 540672 KB | 544431 KB | 544431 KB | 3759 KB |\r\n | from small pool | 746 KB | 1714 KB | 11264 KB | 10517 KB |\r\n |---------------------------------------------------------------------------|\r\n | Active memory | 541418 KB | 545725 KB | 555695 KB | 14276 KB |\r\n | from large pool | 540672 KB | 544431 KB | 544431 KB | 3759 KB |\r\n | from small pool | 746 KB | 1714 KB | 11264 KB | 10517 KB |\r\n |---------------------------------------------------------------------------|\r\n | GPU reserved memory | 598016 KB | 598016 KB | 598016 KB | 0 B |\r\n | from large pool | 595968 KB | 595968 KB | 595968 KB | 0 B |\r\n | from small pool | 2048 KB | 2048 KB | 2048 KB | 0 B |\r\n |---------------------------------------------------------------------------|\r\n | Non-releasable memory | 36117 KB | 52292 KB | 274275 KB | 238158 KB |\r\n | from large pool | 34816 KB | 51537 KB | 261713 KB | 226897 KB |\r\n | from small pool | 1301 KB | 2045 KB | 12562 KB | 11261 KB |\r\n |---------------------------------------------------------------------------|\r\n | Allocations | 198 | 224 | 478 | 280 |\r\n | from large pool | 74 | 75 | 75 | 1 |\r\n | from small pool | 124 | 150 | 403 | 279 |\r\n |---------------------------------------------------------------------------|\r\n | Active allocs | 198 | 224 | 478 | 280 |\r\n | from large pool | 74 | 75 | 75 | 1 |\r\n | from small pool | 124 | 150 | 403 | 279 |\r\n 
|---------------------------------------------------------------------------|\r\n | GPU reserved segments | 21 | 21 | 21 | 0 |\r\n | from large pool | 20 | 20 | 20 | 0 |\r\n | from small pool | 1 | 1 | 1 | 0 |\r\n |---------------------------------------------------------------------------|\r\n | Non-releasable allocs | 18 | 23 | 166 | 148 |\r\n | from large pool | 17 | 18 | 19 | 2 |\r\n | from small pool | 1 | 6 | 147 | 146 |\r\n |===========================================================================|\r\n\r\n## Expected results\r\nEfficiently process the dataset and write it to disk.\r\n\r\n## Actual results\r\n--------------------------------------------------------------------------\r\nOverflowError Traceback (most recent call last)\r\n~\\anaconda3\\envs\\xxx\\lib\\site-packages\\datasets\\arrow_dataset.py in _map_single(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset, disable_tqdm, desc, cache_only)\r\n 2390 else:\r\n-> 2391 writer.write(example)\r\n 2392 else:\r\n\r\n~\\anaconda3\\envs\\xxx\\lib\\site-packages\\datasets\\arrow_writer.py in write(self, example, key, writer_batch_size)\r\n 367 \r\n--> 368 self.write_examples_on_file()\r\n 369 \r\n\r\n~\\anaconda3\\envs\\xxx\\lib\\site-packages\\datasets\\arrow_writer.py in write_examples_on_file(self)\r\n 316 if not isinstance(pa_array[0], pa.lib.FloatScalar):\r\n--> 317 raise OverflowError(\r\n 318 \"There was an overflow in the {}. Try to reduce writer_batch_size to have batches smaller than 2GB\".format(\r\n\r\nOverflowError: There was an overflow in the . Try to reduce writer_batch_size to have batches smaller than 2GB\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nOverflowError Traceback (most recent call last)\r\n~\\AppData\\Local\\Temp\/ipykernel_16268\/2456940807.py in
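The comments in the record above trace the failure to float arrays that are entirely NaN rather than to a genuine 2GB overflow. A hedged pre-flight check inside the mapped function can then fail with a clearer message before `arrow_writer` is reached; this is a minimal sketch under that diagnosis, not the library's fix, and the column name and sample values are illustrative:

```python
# Illustrative guard for the all-NaN condition diagnosed in the comments:
# an all-NaN float column can surface as the misleading
# "OverflowError: ... batches smaller than 2GB" in datasets' arrow_writer.
import numpy as np
from datasets import Dataset

def checked(example):
    arr = np.asarray(example["column_name"], dtype=np.float64)  # assumed column
    if np.isnan(arr).all():
        raise ValueError("row is entirely NaN; this can masquerade as the 2GB OverflowError")
    return {"column_name": arr.tolist()}

ds = Dataset.from_dict({"column_name": [[1.0, 2.0], [float("nan")] * 2]})
ds.map(checked)  # raises the clearer ValueError on the all-NaN row
```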